diff --git a/.circleci/config.yml b/.circleci/config.yml index cfbed9d1d76..fb0ea6e4b9a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -358,15 +358,15 @@ jobs: command: build yarn-project-prod | add_timestamps yarn-project-formatting: - machine: - image: ubuntu-2204:2023.07.2 - resource_class: large + docker: + - image: aztecprotocol/alpine-build-image + resource_class: small steps: - *checkout - *setup_env - run: name: Check Formatting - command: cond_run_container yarn-project formatting | add_timestamps + command: cond_spot_run_container yarn-project 8 formatting | add_timestamps yarn-project-tests: docker: @@ -917,6 +917,11 @@ jobs: command: | should_release || exit 0 yarn-project/deploy_npm.sh latest + - run: + name: "l1-contracts" + command: | + should_release || exit 0 + deploy_npm l1-contracts release-dockerhub: machine: diff --git a/.nvmrc b/.nvmrc index 39e593ebeee..eb800ed459a 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v18.8.0 +v18.19.0 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index bf4932462d0..a655b1913f8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,5 +1,5 @@ { - ".": "0.16.1", - "barretenberg": "0.16.1", - "barretenberg/ts": "0.16.1" + ".": "0.16.7", + "barretenberg": "0.16.7", + "barretenberg/ts": "0.16.7" } diff --git a/CHANGELOG.md b/CHANGELOG.md index bcf97225367..956258bb1b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,179 @@ # Changelog +## [0.16.7](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.6...aztec-packages-v0.16.7) (2023-12-06) + + +### Features + +* Encapsulated Goblin ([#3524](https://github.com/AztecProtocol/aztec-packages/issues/3524)) ([2f08423](https://github.com/AztecProtocol/aztec-packages/commit/2f08423e37942f991634fe6c45de52feb1f159cf)) + + +### Bug Fixes + +* Extract whole archive instead of subset ([#3604](https://github.com/AztecProtocol/aztec-packages/issues/3604)) ([cb000d8](https://github.com/AztecProtocol/aztec-packages/commit/cb000d828dcea0ec5025bceadd322b1d260c0111)) + + +### Documentation + +* **yellow-paper:** Note hash, nullifier, and public data trees ([#3518](https://github.com/AztecProtocol/aztec-packages/issues/3518)) ([0e2db8b](https://github.com/AztecProtocol/aztec-packages/commit/0e2db8b0a819dfe44dd5c76ff89aaa1f403d2071)) + +## [0.16.6](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.5...aztec-packages-v0.16.6) (2023-12-06) + + +### Bug Fixes + +* **pxe:** Initialise aztecjs on pxe startup ([#3601](https://github.com/AztecProtocol/aztec-packages/issues/3601)) ([ceb2ed2](https://github.com/AztecProtocol/aztec-packages/commit/ceb2ed2618398c6af56e69ec0a9f58b808547f30)) +* Remove api_prefix local ([#3599](https://github.com/AztecProtocol/aztec-packages/issues/3599)) ([0d8dd8d](https://github.com/AztecProtocol/aztec-packages/commit/0d8dd8d14fa002b4dadcd7ea70e01c5b263edaee)) + + +### Miscellaneous + +* **yellow_paper:** Fixes to my work on public private messages ([#3507](https://github.com/AztecProtocol/aztec-packages/issues/3507)) ([33a4f63](https://github.com/AztecProtocol/aztec-packages/commit/33a4f63dc8004d144d891fb8016d85471c64e880)) + +## [0.16.5](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.4...aztec-packages-v0.16.5) (2023-12-06) + + +### Features + +* Add EFS file storage to devnet nodes ([#3584](https://github.com/AztecProtocol/aztec-packages/issues/3584)) 
([5b590eb](https://github.com/AztecProtocol/aztec-packages/commit/5b590eb06fab7ecfcd62aa78a04e035dc8db6b41)) + + +### Bug Fixes + +* **ci:** Aztec node devnet healthchecks ([#3598](https://github.com/AztecProtocol/aztec-packages/issues/3598)) ([1a9d742](https://github.com/AztecProtocol/aztec-packages/commit/1a9d742cb21ea71df33eb8931b0faecc96e84508)) +* **ci:** Count for EFS AZ2 ([#3597](https://github.com/AztecProtocol/aztec-packages/issues/3597)) ([d427bca](https://github.com/AztecProtocol/aztec-packages/commit/d427bca1c53aacc499f0895bb172f88d96e9347e)) +* **ci:** L1-contracts npm release ([#3596](https://github.com/AztecProtocol/aztec-packages/issues/3596)) ([008df50](https://github.com/AztecProtocol/aztec-packages/commit/008df5018e8f924ac93ad5d9d712727c51952c54)) +* **ci:** Node health-check + contract address env vars ([#3578](https://github.com/AztecProtocol/aztec-packages/issues/3578)) ([fffc700](https://github.com/AztecProtocol/aztec-packages/commit/fffc7007cf5a5fb5e721c63d4abff5184d40c9c0)) + + +### Miscellaneous + +* Make noir-circuit independent of aztec-nr ([#3591](https://github.com/AztecProtocol/aztec-packages/issues/3591)) ([3013354](https://github.com/AztecProtocol/aztec-packages/commit/301335479f45837e61e1b434566dff98a0867a37)) +* Remove foundation and types deps from boxes ([#3389](https://github.com/AztecProtocol/aztec-packages/issues/3389)) ([eade352](https://github.com/AztecProtocol/aztec-packages/commit/eade352a56b2365b5213962733735e45a6d46fb0)) +* Renaming blockstree to archive ([#3569](https://github.com/AztecProtocol/aztec-packages/issues/3569)) ([6c200e9](https://github.com/AztecProtocol/aztec-packages/commit/6c200e932b6a4bb218059e7b9f92f97c70aa8195)) +* Trivial change roundup ([#3556](https://github.com/AztecProtocol/aztec-packages/issues/3556)) ([ff893b2](https://github.com/AztecProtocol/aztec-packages/commit/ff893b236091b480b6de18ebaab57c62dcdfe1d4)) + + +### Documentation + +* Add libstdc++-12-dev to setup instructions ([#3585](https://github.com/AztecProtocol/aztec-packages/issues/3585)) ([9773e8c](https://github.com/AztecProtocol/aztec-packages/commit/9773e8c3b4789f0dd6b5fdaf0f283b9bd7c9812f)) + +## [0.16.4](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.3...aztec-packages-v0.16.4) (2023-12-05) + + +### Bug Fixes + +* **ci:** Separate step for l1-contracts npm release ([#3581](https://github.com/AztecProtocol/aztec-packages/issues/3581)) ([7745975](https://github.com/AztecProtocol/aztec-packages/commit/7745975731a009c9010291b9174d321941754760)) + +## [0.16.3](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.2...aztec-packages-v0.16.3) (2023-12-05) + + +### Bug Fixes + +* Npm release of l1-contracts ([#3571](https://github.com/AztecProtocol/aztec-packages/issues/3571)) ([487419b](https://github.com/AztecProtocol/aztec-packages/commit/487419be549903a3d42b1232cce02139b2ac556f)) + + +### Miscellaneous + +* CLI's startup time was pushing almost 2s. This gets the basic 'help' down to 0.16. 
([#3529](https://github.com/AztecProtocol/aztec-packages/issues/3529)) ([396df13](https://github.com/AztecProtocol/aztec-packages/commit/396df13389cdcb8b8b0d5a92a4b3d1c2bffcb7a7)) + + +### Documentation + +* Documenting issue with `context.block_header` ([#3565](https://github.com/AztecProtocol/aztec-packages/issues/3565)) ([1237e26](https://github.com/AztecProtocol/aztec-packages/commit/1237e2658d90114c03a6b838cbab80005aa3a661)) + +## [0.16.2](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.1...aztec-packages-v0.16.2) (2023-12-05) + + +### Features + +* Add tree snapshots ([#3468](https://github.com/AztecProtocol/aztec-packages/issues/3468)) ([7a86bb3](https://github.com/AztecProtocol/aztec-packages/commit/7a86bb3a5e2bd9db60c1b70e11ced29deca83ff6)) +* **AVM:** First version for mini AVM (ADD, RETURN, CALLDATACOPY) ([#3439](https://github.com/AztecProtocol/aztec-packages/issues/3439)) ([b3af146](https://github.com/AztecProtocol/aztec-packages/commit/b3af1463ed6b7858252ab4779f8c747a6de47363)) +* Circuit optimized indexed tree batch insertion ([#3367](https://github.com/AztecProtocol/aztec-packages/issues/3367)) ([187d2f7](https://github.com/AztecProtocol/aztec-packages/commit/187d2f79d9390e43ec2e2ce6a0db0d6718cc1716)) +* Devnet ([#3473](https://github.com/AztecProtocol/aztec-packages/issues/3473)) ([97c40c2](https://github.com/AztecProtocol/aztec-packages/commit/97c40c26098dc615e95e8555458401afc88d9516)) +* **docs:** Add simple private voting tutorial ([#3402](https://github.com/AztecProtocol/aztec-packages/issues/3402)) ([a6e0352](https://github.com/AztecProtocol/aztec-packages/commit/a6e035275fc07f11d0354d0794eaa15d937ba278)) +* **docs:** Document slow update tree ([#3416](https://github.com/AztecProtocol/aztec-packages/issues/3416)) ([8e9f103](https://github.com/AztecProtocol/aztec-packages/commit/8e9f10349936ee414526915a93f4ec1070de17e4)) +* Flavor refactor, reduce duplication ([#3407](https://github.com/AztecProtocol/aztec-packages/issues/3407)) ([8d6b013](https://github.com/AztecProtocol/aztec-packages/commit/8d6b01304d797f7cbb576b23a7e115390d113c79)) +* Inclusion and non-inclusion proofs experiment ([#3255](https://github.com/AztecProtocol/aztec-packages/issues/3255)) ([b911e65](https://github.com/AztecProtocol/aztec-packages/commit/b911e6546bea5b3e2301b02459c5db8a1ff9024e)), closes [#2572](https://github.com/AztecProtocol/aztec-packages/issues/2572) [#2584](https://github.com/AztecProtocol/aztec-packages/issues/2584) +* New Poseidon2 circuit builder gates ([#3346](https://github.com/AztecProtocol/aztec-packages/issues/3346)) ([91cb369](https://github.com/AztecProtocol/aztec-packages/commit/91cb369aa7ecbf457965f53057cafa2c2e6f1214)) +* New Poseidon2 relations ([#3406](https://github.com/AztecProtocol/aztec-packages/issues/3406)) ([14b9736](https://github.com/AztecProtocol/aztec-packages/commit/14b9736925c6da33133bd24ee283fb4c199082a5)) +* Pull latest noir for brillig optimizations ([#3464](https://github.com/AztecProtocol/aztec-packages/issues/3464)) ([d356bac](https://github.com/AztecProtocol/aztec-packages/commit/d356bac740d203fbb9363a0127ca1d433358e029)) +* Refactor StandardIndexedTree for abstract leaves and preimages and optimized it ([#3530](https://github.com/AztecProtocol/aztec-packages/issues/3530)) ([63b9cdc](https://github.com/AztecProtocol/aztec-packages/commit/63b9cdc5823df540c73b3e53d8e3c4117deb3b02)) +* Removing historical roots from circuits ([#3544](https://github.com/AztecProtocol/aztec-packages/issues/3544)) 
([9f682cb](https://github.com/AztecProtocol/aztec-packages/commit/9f682cb8cf37eb392c4979f62fdec7126fb4d102)) +* Seperate pil files for sub machines ([#3454](https://github.com/AztecProtocol/aztec-packages/issues/3454)) ([d09d6f5](https://github.com/AztecProtocol/aztec-packages/commit/d09d6f5a5f2c7e2a58658a640a6a6d6ba4294701)) +* Throw compile time error if contract has too many fns ([#3536](https://github.com/AztecProtocol/aztec-packages/issues/3536)) ([ad66ad0](https://github.com/AztecProtocol/aztec-packages/commit/ad66ad0811181def6ef13c646acfc06261958787)) +* Use tree snapshots in aztec-node/pxe/oracles ([#3504](https://github.com/AztecProtocol/aztec-packages/issues/3504)) ([6e40427](https://github.com/AztecProtocol/aztec-packages/commit/6e4042757feb852dca77c957fc52f41e5b30f848)) +* Yellow paper cross-chain communication ([#3477](https://github.com/AztecProtocol/aztec-packages/issues/3477)) ([d51df8c](https://github.com/AztecProtocol/aztec-packages/commit/d51df8cf6d756e03ffa577b9e35b92a9b723e6c1)) + + +### Bug Fixes + +* Check version, chainid and sender for cross-chain l1 to l2 msgs ([#3457](https://github.com/AztecProtocol/aztec-packages/issues/3457)) ([d251703](https://github.com/AztecProtocol/aztec-packages/commit/d251703213c42c427ed3e0f8ff1098edf3b6a2e3)) +* **ci:** Add DEPLOY_TAG in fork log group ([#3510](https://github.com/AztecProtocol/aztec-packages/issues/3510)) ([f021041](https://github.com/AztecProtocol/aztec-packages/commit/f02104136f2d98325baa21792ea10245abffab76)) +* **ci:** Check if l1 contracts img has been deployed ([#3531](https://github.com/AztecProtocol/aztec-packages/issues/3531)) ([ac1f03c](https://github.com/AztecProtocol/aztec-packages/commit/ac1f03c995457df161ce59b181664950871b6436)) +* **ci:** Comment out LB listeners (for now) ([#3519](https://github.com/AztecProtocol/aztec-packages/issues/3519)) ([640aabc](https://github.com/AztecProtocol/aztec-packages/commit/640aabc414876a3dacb5287e2705380a9fafca9f)) +* **ci:** Count for bootnode discovery service ([#3517](https://github.com/AztecProtocol/aztec-packages/issues/3517)) ([2a38788](https://github.com/AztecProtocol/aztec-packages/commit/2a38788ee7857162a9af391323f53187e670dedc)) +* **ci:** Define REPOSITORY in deploy_l1_contracts ([#3514](https://github.com/AztecProtocol/aztec-packages/issues/3514)) ([b246d1b](https://github.com/AztecProtocol/aztec-packages/commit/b246d1ba3a899af5e7566944a9d79be62827cdd5)) +* **ci:** Don't deploy to npm on master merge ([#3502](https://github.com/AztecProtocol/aztec-packages/issues/3502)) ([a138860](https://github.com/AztecProtocol/aztec-packages/commit/a138860bf4032be9688c5ffb5d95b12bcb6d459e)) +* **ci:** Env vars for deploying l1-contracts ([#3513](https://github.com/AztecProtocol/aztec-packages/issues/3513)) ([27106b2](https://github.com/AztecProtocol/aztec-packages/commit/27106b2e2845cb32ea229a8527b86a691a668f20)) +* **ci:** Export FORK_API_KEY from setup_env ([#3512](https://github.com/AztecProtocol/aztec-packages/issues/3512)) ([7e81e2c](https://github.com/AztecProtocol/aztec-packages/commit/7e81e2c53deaf2b5efcc6b0567fc1240540471eb)) +* **ci:** Fix docker architecture for devnet packages ([#3505](https://github.com/AztecProtocol/aztec-packages/issues/3505)) ([66d0287](https://github.com/AztecProtocol/aztec-packages/commit/66d02879a33ded27e188b90b1d7ac6b551830acc)) +* **ci:** Fix faucet vars + don't deploy contracts from node ([#3553](https://github.com/AztecProtocol/aztec-packages/issues/3553)) 
([c7176f6](https://github.com/AztecProtocol/aztec-packages/commit/c7176f6c04486a3f261a48958ccadba684f33521)) +* **ci:** L1 contracts directories ([#3545](https://github.com/AztecProtocol/aztec-packages/issues/3545)) ([63dd0c8](https://github.com/AztecProtocol/aztec-packages/commit/63dd0c8852ca7605a2407458b355b3776a96b37c)) +* **ci:** Login to ecr to fetch contracts image ([#3538](https://github.com/AztecProtocol/aztec-packages/issues/3538)) ([b033538](https://github.com/AztecProtocol/aztec-packages/commit/b0335383c884d81562c2911ecae9d889f1076254)) +* **ci:** Remove unused ADDRESS vars & export private key vars ([#3520](https://github.com/AztecProtocol/aztec-packages/issues/3520)) ([d889359](https://github.com/AztecProtocol/aztec-packages/commit/d8893590a8f6f7b1d0a60279a6a2bc9fd0b5c154)) +* **ci:** Set default value for $TO_TAINT ([#3508](https://github.com/AztecProtocol/aztec-packages/issues/3508)) ([8b6688a](https://github.com/AztecProtocol/aztec-packages/commit/8b6688a7975a748f910f67ee17dbc61fd1df7001)) +* **ci:** Terraform listener resources ([#3534](https://github.com/AztecProtocol/aztec-packages/issues/3534)) ([c3b9cce](https://github.com/AztecProtocol/aztec-packages/commit/c3b9cce96599451fce79fd3318176da4708bfa6a)) +* **ci:** Terraform_deploy for devnet ([#3516](https://github.com/AztecProtocol/aztec-packages/issues/3516)) ([ba3803e](https://github.com/AztecProtocol/aztec-packages/commit/ba3803ec7c208804f8da5ee81b9989f4640a2fc1)) +* **ci:** Tf variable references & formatting([#3522](https://github.com/AztecProtocol/aztec-packages/issues/3522)) ([d37cf52](https://github.com/AztecProtocol/aztec-packages/commit/d37cf520348e17acdc9de93bc2cf83560ccf57d5)) +* Disable e2e-slow-tree ([#3459](https://github.com/AztecProtocol/aztec-packages/issues/3459)) ([5927103](https://github.com/AztecProtocol/aztec-packages/commit/59271039b3a087a4f33b11701929cebf2eadb61d)) +* **docs:** Update package name of aztec-cli ([#3474](https://github.com/AztecProtocol/aztec-packages/issues/3474)) ([98d7ba0](https://github.com/AztecProtocol/aztec-packages/commit/98d7ba0c1d8c809f1bcb05e517412f99e46f95ae)) +* Double slash in deployed faucet routes ([#3555](https://github.com/AztecProtocol/aztec-packages/issues/3555)) ([6c704a5](https://github.com/AztecProtocol/aztec-packages/commit/6c704a5502746e8a002e039ce8c73e8e207ca9d0)) +* Faucet lb_listener priority ([#3554](https://github.com/AztecProtocol/aztec-packages/issues/3554)) ([3f56dd7](https://github.com/AztecProtocol/aztec-packages/commit/3f56dd7cacfda0eb7a4bf0c38ec804a85e6881d2)) +* Handling low_nullifier.next_value equal to 0 ([#3562](https://github.com/AztecProtocol/aztec-packages/issues/3562)) ([c800502](https://github.com/AztecProtocol/aztec-packages/commit/c8005023d80a2a4e15d3a3bea10072371e3c5842)), closes [#3550](https://github.com/AztecProtocol/aztec-packages/issues/3550) +* Remove x86_64 form l1-contracts img tag ([#3549](https://github.com/AztecProtocol/aztec-packages/issues/3549)) ([6828f1a](https://github.com/AztecProtocol/aztec-packages/commit/6828f1ac33755ca6ccf42096d741d5ea326dae66)) +* Throw error if fn sig has whitespaces ([#3509](https://github.com/AztecProtocol/aztec-packages/issues/3509)) ([7671063](https://github.com/AztecProtocol/aztec-packages/commit/7671063a2cb32c45a751c33f6ed5e1b8bea8608f)), closes [#3055](https://github.com/AztecProtocol/aztec-packages/issues/3055) + + +### Miscellaneous + +* (yellow paper) public-vm section of yellow paper ([#3493](https://github.com/AztecProtocol/aztec-packages/issues/3493)) 
([8ff3780](https://github.com/AztecProtocol/aztec-packages/commit/8ff378005f78126260cb0950a8167ec40efd14aa)) +* Add mermaid diagram support ([#3499](https://github.com/AztecProtocol/aztec-packages/issues/3499)) ([537d552](https://github.com/AztecProtocol/aztec-packages/commit/537d552009676a7dfed2d75e7f73a572591699af)) +* Add yellow paper build check to CI ([#3490](https://github.com/AztecProtocol/aztec-packages/issues/3490)) ([3ebd2f2](https://github.com/AztecProtocol/aztec-packages/commit/3ebd2f25646c7db170d22c62f41888d0c417d644)) +* **avm:** Enable AVM unit tests in CI ([#3463](https://github.com/AztecProtocol/aztec-packages/issues/3463)) ([051dda9](https://github.com/AztecProtocol/aztec-packages/commit/051dda9c50f1d9f11f5063ddf51c9986a6998b43)), closes [#3461](https://github.com/AztecProtocol/aztec-packages/issues/3461) +* **bb:** Pointer_view to reference-based get_all ([#3495](https://github.com/AztecProtocol/aztec-packages/issues/3495)) ([50d7327](https://github.com/AztecProtocol/aztec-packages/commit/50d73271919306a05ac3a7c2e7d37363b6761248)) +* **bb:** Reuse entities from GoblinUltra in GoblinUltraRecursive ([#3521](https://github.com/AztecProtocol/aztec-packages/issues/3521)) ([8259636](https://github.com/AztecProtocol/aztec-packages/commit/8259636c016c0adecb052f176e78444fb5481c38)) +* Build the acir test vectors as part of CI. ([#3447](https://github.com/AztecProtocol/aztec-packages/issues/3447)) ([1a2d1f8](https://github.com/AztecProtocol/aztec-packages/commit/1a2d1f822d0e1fabd322c2c4d0473629edd56380)) +* Containers reduced to ~100MB total. ~30s installation. ([#3487](https://github.com/AztecProtocol/aztec-packages/issues/3487)) ([b49cef2](https://github.com/AztecProtocol/aztec-packages/commit/b49cef21e30f06bce23f421b533e64728278cbf8)) +* **docs:** Fix broken Noir stdlib link ([#3496](https://github.com/AztecProtocol/aztec-packages/issues/3496)) ([787d59a](https://github.com/AztecProtocol/aztec-packages/commit/787d59a1a583788773a0e5d75a9079328ce2a21d)) +* Field-agnostic and reusable transcript ([#3433](https://github.com/AztecProtocol/aztec-packages/issues/3433)) ([d78775a](https://github.com/AztecProtocol/aztec-packages/commit/d78775adb9574a3d76c3fca8cf940cdef460ae10)) +* Fix broken link in txs in yellow paper ([#3484](https://github.com/AztecProtocol/aztec-packages/issues/3484)) ([798565d](https://github.com/AztecProtocol/aztec-packages/commit/798565d5a8a5cb096c9b2efb6d41de1c449d2c4e)) +* Fix yellow paper build error ([32881a4](https://github.com/AztecProtocol/aztec-packages/commit/32881a4d0912e0287b558a4785b6d60c50f84335)) +* Fixed typo in build system ([#3501](https://github.com/AztecProtocol/aztec-packages/issues/3501)) ([3a80ac2](https://github.com/AztecProtocol/aztec-packages/commit/3a80ac2caf5f1f847f5e6b2a7b526b81a211de29)) +* Increase functions per contract from 16 to 32 ([#3503](https://github.com/AztecProtocol/aztec-packages/issues/3503)) ([ebdeea3](https://github.com/AztecProtocol/aztec-packages/commit/ebdeea3f4bc721d5708b44ba1f89ba24eb0e25d5)) +* Naming fixes ([#3476](https://github.com/AztecProtocol/aztec-packages/issues/3476)) ([1db30bf](https://github.com/AztecProtocol/aztec-packages/commit/1db30bf0d61a7b2920ab1aedaef58bc0922ec78e)) +* Optimise bb.js package size and sandox/cli dockerfiles to unbloat final containers. 
([#3462](https://github.com/AztecProtocol/aztec-packages/issues/3462)) ([cb3db5d](https://github.com/AztecProtocol/aztec-packages/commit/cb3db5d0f1f8912f1a97258e5043eb0f69eff551)) +* Pin node version in docker base images and bump nvmrc ([#3537](https://github.com/AztecProtocol/aztec-packages/issues/3537)) ([5d3895a](https://github.com/AztecProtocol/aztec-packages/commit/5d3895aefb7812eb6bd8017baf43533959ad69b4)) +* Recursive verifier updates ([#3452](https://github.com/AztecProtocol/aztec-packages/issues/3452)) ([dbb4a12](https://github.com/AztecProtocol/aztec-packages/commit/dbb4a1205528bdd8217ea2d15ccf060e2aa9b7d2)) +* Refactor `WitnessEntities` to be able to derive `WitnessCommitments` from it ([#3479](https://github.com/AztecProtocol/aztec-packages/issues/3479)) ([9c9b561](https://github.com/AztecProtocol/aztec-packages/commit/9c9b561f392de5fce11cefe4d72e4f33f2567f41)) +* Remove temporary logging ([#3466](https://github.com/AztecProtocol/aztec-packages/issues/3466)) ([8c8387b](https://github.com/AztecProtocol/aztec-packages/commit/8c8387b6b18335ca23f62c3d4c942415b7449462)) +* Transcript handled through shared_ptr ([#3434](https://github.com/AztecProtocol/aztec-packages/issues/3434)) ([30fca33](https://github.com/AztecProtocol/aztec-packages/commit/30fca3307ee7e33d81fd51c3d280c6362baef0b9)) +* Typo fixes ([#3488](https://github.com/AztecProtocol/aztec-packages/issues/3488)) ([d9a44dc](https://github.com/AztecProtocol/aztec-packages/commit/d9a44dc2e655752e1c6503ac85b64169ec7e4754)) +* **yellow_paper:** Public<>private messaging ([#3491](https://github.com/AztecProtocol/aztec-packages/issues/3491)) ([6ecc406](https://github.com/AztecProtocol/aztec-packages/commit/6ecc406159a022e5d57267dcaea48e0df25bbda0)) + + +### Documentation + +* Add transaction section to yellow paper ([#3418](https://github.com/AztecProtocol/aztec-packages/issues/3418)) ([44bf30b](https://github.com/AztecProtocol/aztec-packages/commit/44bf30b0af5a546e375d068790e9fa7e94d6ca52)) +* Apply comments from Jan on contracts ([#3539](https://github.com/AztecProtocol/aztec-packages/issues/3539)) ([e351873](https://github.com/AztecProtocol/aztec-packages/commit/e351873cadb5cbca5d1d61016e6f9a9e7479bff9)) +* Fees update in yellow paper ([#3486](https://github.com/AztecProtocol/aztec-packages/issues/3486)) ([a8b2608](https://github.com/AztecProtocol/aztec-packages/commit/a8b26086306bfec6e7808f4858a08644e84336f4)) +* First go at generated AVM instruction set doc ([#3469](https://github.com/AztecProtocol/aztec-packages/issues/3469)) ([8cc54a4](https://github.com/AztecProtocol/aztec-packages/commit/8cc54a48917ff319a5c2b706e01cfbf5ebca013e)) +* Further update to the yellow paper ([#3542](https://github.com/AztecProtocol/aztec-packages/issues/3542)) ([751bb6a](https://github.com/AztecProtocol/aztec-packages/commit/751bb6a2075705931b3035117512a93769142707)) +* Yellow paper updates ([#3478](https://github.com/AztecProtocol/aztec-packages/issues/3478)) ([11f754d](https://github.com/AztecProtocol/aztec-packages/commit/11f754d256cc164ca2d50b9923aeba1612e7f48b)) +* Yellow paper updates for private message delivery ([#3472](https://github.com/AztecProtocol/aztec-packages/issues/3472)) ([6ba9e18](https://github.com/AztecProtocol/aztec-packages/commit/6ba9e18820c85acca692d2af03e4d800c29ab6dc)) +* **yellow-paper:** Sync, enqueued, and static calls ([#3494](https://github.com/AztecProtocol/aztec-packages/issues/3494)) ([00835c6](https://github.com/AztecProtocol/aztec-packages/commit/00835c67b460074fe16e19b27a47ac37273e743b)), closes 
[#3108](https://github.com/AztecProtocol/aztec-packages/issues/3108)
+* **yellowpaper:** Instruction set updates and fixes ([#3515](https://github.com/AztecProtocol/aztec-packages/issues/3515)) ([bfb61dd](https://github.com/AztecProtocol/aztec-packages/commit/bfb61dd1412e856adc912f0e3133cd6f8c9e8fbf))
+
 ## [0.16.1](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.0...aztec-packages-v0.16.1) (2023-11-28)
diff --git a/aztec-up/.gitignore b/aztec-up/.gitignore
new file mode 100644
index 00000000000..5fd438ece3e
--- /dev/null
+++ b/aztec-up/.gitignore
@@ -0,0 +1,3 @@
+.terraform
+.terraform*
+.DS_Store
\ No newline at end of file
diff --git a/aztec-up/README.md b/aztec-up/README.md
new file mode 100644
index 00000000000..630d0e0fc5c
--- /dev/null
+++ b/aztec-up/README.md
@@ -0,0 +1,30 @@
+# The Aztec Installation Script

+```
+bash -i <(curl -s install.aztec.network)
+```
+
+That is all.
+
+This will install a collection of scripts into `~/.aztec/bin` to help run Aztec containers, and will update
+the user's `PATH` variable in their shell startup script so they can be found.
+
+- `aztec` - The infrastructure container.
+- `aztec-cli` - A command line tool for interacting with infrastructure.
+- `aztec-nargo` - A build of `nargo` from `noir` that is guaranteed to be version aligned. Provides the compiler, LSP, and more.
+- `aztec-sandbox` - A wrapper around docker-compose that launches services needed for sandbox testing.
+- `aztec-up` - A tool to upgrade the aztec toolchain to the latest, or specific, versions.
+
+Once installed, you can use `aztec-up` to upgrade to, or install, specific versions.
+
+```
+VERSION=master aztec-up
+```
+
+This will install the containers built from the master branch.
+
+```
+VERSION=v1.2.3 aztec-up
+```
+
+This will install tagged release version 1.2.3.
diff --git a/aztec-up/bin/.aztec-run b/aztec-up/bin/.aztec-run
new file mode 100755
index 00000000000..a02a3bafbd9
--- /dev/null
+++ b/aztec-up/bin/.aztec-run
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+# This script starts a Docker container, forwarding any commands and arguments to the process running inside it.
+# It handles mounting paths into the container.
+# It handles network communication back to the host.
+set -euo pipefail
+
+IMAGE=${1:-}
+shift
+
+VERSION=${VERSION:-"latest"}
+
+# Any host bindings we might send to the container.
+DOCKER_HOST_BINDS=""
+
+# Volumes to pass to the container.
+DOCKER_VOLUME="-v $HOME:/root"
+
+# Colors.
+y="\033[33m"
+r="\033[0m"
+
+function warn {
+  echo -e "${y}$1${r}"
+}
+
+if ! command -v docker &> /dev/null; then
+  warn "No docker found."
+  exit 1
+fi
+
+if [[ $PWD != ${HOME}* ]]; then
+  warn "Due to how we containerize our applications, we require your working directory to be somewhere within $HOME."
+  exit 1
+fi
+
+# Set up host.docker.internal alias on Linux, just like it is on mac.
+UNAME=$(uname -s)
+if [ "$UNAME" == "Linux" ]; then
+  if docker info 2>/dev/null | grep -q rootless; then
+    # We're in rootless docker. Probe for the host ip and use that.
+    ip=$(hostname -I | cut -d' ' -f1)
+    warn "WARNING: Running within rootless docker. Using $ip as host ip. Ensure listening services are listening on this interface."
+    DOCKER_HOST_BINDS="$DOCKER_HOST_BINDS --add-host host.docker.internal:$ip"
+  else
+    DOCKER_HOST_BINDS="$DOCKER_HOST_BINDS --add-host host.docker.internal:host-gateway"
+  fi
+fi
+
+# Substitute any references to localhost with our host gateway.
+# TODO: In node, we can hook the resolve override for localhost with host.docker.internal.
+#       Consider if we should just do that, but that wouldn't help e.g. nargo.
+args=("$@")
+for i in "${!args[@]}"; do
+  args[$i]=${args[$i]//localhost/host.docker.internal}
+done
+
+# Check if any argument is a file or directory that exists outside of $HOME.
+# If so, warn and exit.
+for i in "${!args[@]}"; do
+  arg=${args[$i]}
+  if [[ ( -f "$arg" || -d "$arg" ) && $(realpath "$arg") != ${HOME}* ]]; then
+    warn "Due to how we containerize our applications, paths outside of $HOME cannot be referenced."
+    exit 1
+  fi
+done
+
+DOCKER_ENV=""
+for env in ${ENV_VARS_TO_INJECT:-}; do
+  # First substitute any reference to localhost with our host gateway.
+  env=${env//localhost/host.docker.internal}
+  # Inject into container (docker expects VAR=value).
+  DOCKER_ENV+="-e $env=${!env:-} "
+done
+
+DOCKER_VOLUME="$DOCKER_VOLUME -v cache:/cache"
+
+docker run \
+  -ti \
+  --rm \
+  --workdir "${PWD/$HOME/\/root}" \
+  $DOCKER_HOST_BINDS \
+  $DOCKER_ENV \
+  $DOCKER_VOLUME \
+  $IMAGE:$VERSION ${args[@]:-}
diff --git a/aztec-up/bin/aztec b/aztec-up/bin/aztec
new file mode 100755
index 00000000000..30ef8a66fab
--- /dev/null
+++ b/aztec-up/bin/aztec
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+$(dirname $0)/.aztec-run aztecprotocol/aztec-sandbox "$@"
\ No newline at end of file
diff --git a/aztec-up/bin/aztec-cli b/aztec-up/bin/aztec-cli
new file mode 100755
index 00000000000..7d8b75f4146
--- /dev/null
+++ b/aztec-up/bin/aztec-cli
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+# TODO: Make compile command always be wasm. Or put nargo in container. Or probe.
+# TODO: Make unbox fail if trying to unbox outside of the cwd.
+set -euo pipefail
+
+export ENV_VARS_TO_INJECT="PXE_URL PRIVATE_KEY DEBUG"
+export PXE_URL=${PXE_URL:-"http://host.docker.internal:8080"}
+
+$(dirname $0)/.aztec-run aztecprotocol/cli "$@"
\ No newline at end of file
diff --git a/aztec-up/bin/aztec-install b/aztec-up/bin/aztec-install
new file mode 100755
index 00000000000..11eaada215d
--- /dev/null
+++ b/aztec-up/bin/aztec-install
@@ -0,0 +1,163 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Colors
+g="\033[32m" # Green
+y="\033[33m" # Yellow
+b="\033[34m" # Blue
+p="\033[35m" # Purple
+r="\033[0m" # Reset
+bold="\033[1m"
+
+# Function to replace characters and add color
+function print_colored() {
+  local b=$'\033[34m' # Blue
+  local y=$'\033[33m' # Yellow
+  local r=$'\033[0m' # Reset
+  echo "$1" | sed -E "s/(█+)/${b}\1${y}/g"
+}
+
+function title() {
+  echo
+  print_colored " █████╗ ███████╗████████╗███████╗ ██████╗"
+  print_colored "██╔══██╗╚══███╔╝╚══██╔══╝██╔════╝██╔════╝"
+  print_colored "███████║ ███╔╝ ██║ █████╗ ██║"
+  print_colored "██╔══██║ ███╔╝ ██║ ██╔══╝ ██║"
+  print_colored "██║ ██║███████╗ ██║ ███████╗╚██████╗"
+  print_colored "╚═╝ ╚═╝╚══════╝ ╚═╝ ╚══════╝ ╚═════╝"
+  echo -e "${r}"
+  echo -e "Welcome to the ${bold}${b}Aztec${r} installer! Your journey into blockchain privacy begins... ${bold}${p}now${r}."
+  echo -e "We presently leverage docker to simplify releases of our complex project."
+  echo -e "Please ensure it's installed for your platform: https://docs.docker.com/engine/install"
+  echo
+  if [ "$(uname -s)" == "Darwin" ]; then
+    echo -e "${y}WARNING: For best performance we recommend adjusting your default docker settings:"
+    echo -e " - Under general, enable VirtioFS."
+    echo -e " - Under resources, set CPUs to ~80-100% of your maximum."
+    echo -e " - Under resources, set Memory to ~80% of your maximum."
+    echo -e "You may receive a warning about your home directory being mounted into a container."
+ echo -e "This is requested so we can read and write project files, that is all." + echo -e "${r}" + fi + echo -e "This will install the following scripts and update your PATH if necessary:" + echo -e " ${bold}${g}aztec${r} - launches various infrastructure subsystems (sequencer, prover, pxe, etc)." + echo -e " ${bold}${g}aztec-cli${r} - a command line tool for interfacing and experimenting with infrastructure." + echo -e " ${bold}${g}aztec-nargo${r} - aztec's build of nargo, the noir compiler toolchain." + echo -e " ${bold}${g}aztec-sandbox${r} - a wrapper around docker-compose that launches services needed for sandbox testing." + echo -e " ${bold}${g}aztec-up${r} - a tool to upgrade the aztec toolchain to the latest, or specific versions." + echo + read -p "Do you wish to continue? (y/n)" -n 1 -r + echo + if [[ ! $REPLY =~ ^[Yy]$ ]]; then + exit 0 + fi +} + +function info { + echo -e "${g}$1${r}" +} + +function warn { + echo -e "${y}$1${r}" +} + +AZTEC_PATH=$HOME/.aztec +BIN_PATH=$AZTEC_PATH/bin + +# Define version if specified, otherwise set to "latest". +VERSION=${VERSION:-"latest"} +INSTALL_HOST=install.aztec.network.s3-website.eu-west-2.amazonaws.com + +[ -z "${SKIP_TITLE:-}" ] && title + +# Check if Docker is available. +if ! command -v docker &>/dev/null; then + warn "Docker is not installed. Please install Docker and try again." + exit 1 +fi + +# Check if Docker is running. +if ! docker info &>/dev/null; then + warn "Docker is not running. Please start Docker and try again." + exit 1 +fi + +if ! docker compose &>/dev/null && ! command -v docker-compose &>/dev/null; then + warn "WARNING: 'docker compose' not supported and docker-compose not found." + warn "Continuing installation, but aztec-sandbox will not work." +fi + +# Create a "hidden" `$HOME/.aztec` dir, so as not to clutter the user's cwd. +rm -f $BIN_PATH/* && mkdir -p $BIN_PATH + +# Download containers from dockerhub. Tag them as latest. +function pull_container { + docker pull aztecprotocol/$1:$VERSION + + # If not latest, retag to be latest so it runs from scripts. + if [ $VERSION != "latest" ]; then + docker tag aztecprotocol/$1:$VERSION aztecprotocol/$1:latest + fi +} + +if [ -z "${SKIP_PULL:-}" ]; then + info "Pulling aztec version $VERSION..." + pull_container aztec-sandbox + pull_container cli + pull_container noir +fi + +# Download the Docker Compose file. Used by aztec-start. +curl -fsSL http://$INSTALL_HOST/docker-compose.yml -o $BIN_PATH/docker-compose.yml + +function install_bin { + curl -fsSL http://$INSTALL_HOST/$1 -o $BIN_PATH/$1 + chmod +x $BIN_PATH/$1 + echo "Installed: $BIN_PATH/$1" +} + +info "Installing scripts in $BIN_PATH..." +install_bin .aztec-run +install_bin aztec +install_bin aztec-cli +install_bin aztec-sandbox +install_bin aztec-up +install_bin aztec-nargo + +function update_path_env_var { + TARGET_DIR="${1}" + # Check if the target directory is in the user's PATH. + if [[ ":$PATH:" != *":$TARGET_DIR:"* ]]; then + # Determine the user's shell. + SHELL_PROFILE="" + case $SHELL in + */bash) + SHELL_PROFILE="$HOME/.bashrc" + ;; + */zsh) + SHELL_PROFILE="$HOME/.zshrc" + ;; + # Add other shells as needed + *) + echo "Unsupported shell: $SHELL" + return + ;; + esac + # Inform the user about the change and ask for confirmation + warn "The directory $TARGET_DIR is not in your PATH." + read -p "Add it to $SHELL_PROFILE to make the aztec binaries accessible? (y/n)" -n 1 -r + echo + if [[ $REPLY =~ ^[Yy]$ ]]; then + # Add the target directory to the user's PATH in their profile. 
+ echo "export PATH=\$PATH:$TARGET_DIR" >> "$SHELL_PROFILE" + info "Done! Starting fresh shell..." + $SHELL + else + warn "Skipped updating PATH. You might need to add $TARGET_DIR to your PATH manually to use the binary." + fi + fi +} + +update_path_env_var $BIN_PATH + +info "Done!" \ No newline at end of file diff --git a/aztec-up/bin/aztec-nargo b/aztec-up/bin/aztec-nargo new file mode 100755 index 00000000000..5fdee793d7d --- /dev/null +++ b/aztec-up/bin/aztec-nargo @@ -0,0 +1,4 @@ +#!/usr/bin/env bash +set -euo pipefail + +$(dirname $0)/.aztec-run aztecprotocol/noir $@ \ No newline at end of file diff --git a/aztec-up/bin/aztec-sandbox b/aztec-up/bin/aztec-sandbox new file mode 100755 index 00000000000..1558bc49cb2 --- /dev/null +++ b/aztec-up/bin/aztec-sandbox @@ -0,0 +1,21 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Favour 'docker compose', falling back on docker-compose. +CMD="docker compose" +$CMD &>/dev/null || CMD="docker-compose" + +ARGS="-f $HOME/.aztec/bin/docker-compose.yml -p sandbox" + +# Function to be executed when SIGINT is received. +cleanup() { + $CMD $ARGS down +} + +# Set trap to catch SIGINT and call the cleanup function. +trap cleanup SIGINT + +# Change working dir, so relative volume mounts are in the right place. +cd ~/.aztec + +$CMD $ARGS up --force-recreate --remove-orphans \ No newline at end of file diff --git a/aztec-up/bin/aztec-up b/aztec-up/bin/aztec-up new file mode 100755 index 00000000000..736307e1827 --- /dev/null +++ b/aztec-up/bin/aztec-up @@ -0,0 +1,5 @@ +#!/usr/bin/env bash +set -euo pipefail + +export SKIP_TITLE=1 +bash -i <(curl -s http://install.aztec.network) \ No newline at end of file diff --git a/aztec-up/bin/docker-compose.yml b/aztec-up/bin/docker-compose.yml new file mode 100644 index 00000000000..b861d729d16 --- /dev/null +++ b/aztec-up/bin/docker-compose.yml @@ -0,0 +1,35 @@ +version: "3" +services: + ethereum: + image: ghcr.io/foundry-rs/foundry@sha256:29ba6e34379e79c342ec02d437beb7929c9e254261e8032b17e187be71a2609f + command: > + ' + [ -n "$$FORK_URL" ] && ARGS="$$ARGS --fork-url $$FORK_URL"; + [ -n "$$FORK_BLOCK_NUMBER" ] && ARGS="$$ARGS --fork-block-number $$FORK_BLOCK_NUMBER"; + echo anvil -p 8545 --host 0.0.0.0 --chain-id 31337 --silent $$ARGS; + anvil -p 8545 --host 0.0.0.0 --chain-id 31337 --silent $$ARGS + ' + ports: + - "${SANDBOX_ANVIL_PORT:-8545}:8545" + environment: + FORK_URL: + FORK_BLOCK_NUMBER: + + aztec: + image: "aztecprotocol/aztec-sandbox" + ports: + - "${SANDBOX_AZTEC_NODE_PORT:-8079}:8079" + - "${SANDBOX_PXE_PORT:-8080}:8080" + environment: + DEBUG: # Loaded from the user shell if explicitly set + HOST_WORKDIR: "${PWD}" # Loaded from the user shell to show log files absolute path in host + ETHEREUM_HOST: http://ethereum:8545 + CHAIN_ID: 31337 + ARCHIVER_POLLING_INTERVAL_MS: 50 + P2P_BLOCK_CHECK_INTERVAL_MS: 50 + SEQ_TX_POLLING_INTERVAL_MS: 50 + WS_BLOCK_CHECK_INTERVAL_MS: 50 + PXE_BLOCK_POLLING_INTERVAL_MS: 50 + ARCHIVER_VIEM_POLLING_INTERVAL_MS: 500 + volumes: + - ./log:/usr/src/yarn-project/aztec-sandbox/log:rw diff --git a/aztec-up/deploy.sh b/aztec-up/deploy.sh new file mode 100755 index 00000000000..14f89b8e8db --- /dev/null +++ b/aztec-up/deploy.sh @@ -0,0 +1,23 @@ +set -e + +BRANCH=$1 + +export TF_VAR_BRANCH=$BRANCH + +# Downloads and installs `terraform` if it's not installed. +if [ ! 
-f /usr/local/bin/terraform ]; then + cd $HOME + TERRAFORM_VERSION=1.5.2 + curl -sSL https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip -o terraform.zip + sudo apt install -y unzip + unzip terraform.zip + sudo mv terraform /usr/local/bin/ + rm terraform.zip + cd - +fi + +echo "Initializing terraform" +terraform init -input=false -backend-config="key=aztec-sandbox-website/$BRANCH" + +echo "Applying terraform config" +terraform apply -input=false -auto-approve \ No newline at end of file diff --git a/aztec-up/terraform/main.tf b/aztec-up/terraform/main.tf new file mode 100644 index 00000000000..2465082e3fa --- /dev/null +++ b/aztec-up/terraform/main.tf @@ -0,0 +1,88 @@ +terraform { + backend "s3" { + bucket = "aztec-terraform" + region = "eu-west-2" + key = "aztec-up" + } + required_providers { + aws = { + source = "hashicorp/aws" + version = "5.29.0" + } + } +} + +# Define provider and region +provider "aws" { + region = "eu-west-2" +} + +data "terraform_remote_state" "aztec2_iac" { + backend = "s3" + config = { + bucket = "aztec-terraform" + key = "aztec2/iac" + region = "eu-west-2" + } +} + +# Create the website S3 bucket +resource "aws_s3_bucket" "install_bucket" { + bucket = "install.aztec.network" +} + +resource "aws_s3_bucket_website_configuration" "website_bucket" { + bucket = aws_s3_bucket.install_bucket.id + + index_document { + suffix = "aztec-install" + } +} + +resource "aws_s3_bucket_public_access_block" "install_bucket_public_access" { + bucket = aws_s3_bucket.install_bucket.id + + block_public_acls = false + ignore_public_acls = false + block_public_policy = false + restrict_public_buckets = false +} + +resource "aws_s3_bucket_policy" "install_bucket_policy" { + bucket = aws_s3_bucket.install_bucket.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Principal = "*" + Action = "s3:GetObject" + Resource = "arn:aws:s3:::${aws_s3_bucket.install_bucket.id}/*" + } + ] + }) +} + +# Upload files to s3 bucket if changes were detected +resource "null_resource" "upload_public_directory" { + triggers = { + always_run = "${timestamp()}" + } + + provisioner "local-exec" { + command = "aws s3 sync ../bin s3://${aws_s3_bucket.install_bucket.id}/" + } +} + +resource "aws_route53_record" "subdomain_record" { + zone_id = data.terraform_remote_state.aztec2_iac.outputs.aws_route53_zone_id + name = "install.aztec.network" + type = "A" + + alias { + name = "${aws_s3_bucket_website_configuration.website_bucket.website_domain}" + zone_id = "${aws_s3_bucket.install_bucket.hosted_zone_id}" + evaluate_target_health = true + } +} diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index 96b02dbe9e8..ea339226546 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = 7afca0e7caf5440862b1ea12874418a319608121 - parent = 9f682cb8cf37eb392c4979f62fdec7126fb4d102 + commit = 86e6d6f1e48a0609de3f8e8f99007d45b4dbbdf4 + parent = 9c5443651faaed3dcb9fae36727337a34ce5922b method = merge cmdver = 0.4.6 diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index 961b93e492c..65412da7cf3 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,72 @@ # Changelog +## [0.16.7](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.6...barretenberg-v0.16.7) (2023-12-06) + + +### Features + +* Encapsulated Goblin 
([#3524](https://github.com/AztecProtocol/aztec-packages/issues/3524)) ([2f08423](https://github.com/AztecProtocol/aztec-packages/commit/2f08423e37942f991634fe6c45de52feb1f159cf)) + +## [0.16.6](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.5...barretenberg-v0.16.6) (2023-12-06) + + +### Miscellaneous + +* **barretenberg:** Synchronize aztec-packages versions + +## [0.16.5](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.4...barretenberg-v0.16.5) (2023-12-06) + + +### Miscellaneous + +* Trivial change roundup ([#3556](https://github.com/AztecProtocol/aztec-packages/issues/3556)) ([ff893b2](https://github.com/AztecProtocol/aztec-packages/commit/ff893b236091b480b6de18ebaab57c62dcdfe1d4)) + + +### Documentation + +* Add libstdc++-12-dev to setup instructions ([#3585](https://github.com/AztecProtocol/aztec-packages/issues/3585)) ([9773e8c](https://github.com/AztecProtocol/aztec-packages/commit/9773e8c3b4789f0dd6b5fdaf0f283b9bd7c9812f)) + +## [0.16.4](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.3...barretenberg-v0.16.4) (2023-12-05) + + +### Miscellaneous + +* **barretenberg:** Synchronize aztec-packages versions + +## [0.16.3](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.2...barretenberg-v0.16.3) (2023-12-05) + + +### Miscellaneous + +* CLI's startup time was pushing almost 2s. This gets the basic 'help' down to 0.16. ([#3529](https://github.com/AztecProtocol/aztec-packages/issues/3529)) ([396df13](https://github.com/AztecProtocol/aztec-packages/commit/396df13389cdcb8b8b0d5a92a4b3d1c2bffcb7a7)) + +## [0.16.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.1...barretenberg-v0.16.2) (2023-12-05) + + +### Features + +* **AVM:** First version for mini AVM (ADD, RETURN, CALLDATACOPY) ([#3439](https://github.com/AztecProtocol/aztec-packages/issues/3439)) ([b3af146](https://github.com/AztecProtocol/aztec-packages/commit/b3af1463ed6b7858252ab4779f8c747a6de47363)) +* Flavor refactor, reduce duplication ([#3407](https://github.com/AztecProtocol/aztec-packages/issues/3407)) ([8d6b013](https://github.com/AztecProtocol/aztec-packages/commit/8d6b01304d797f7cbb576b23a7e115390d113c79)) +* New Poseidon2 circuit builder gates ([#3346](https://github.com/AztecProtocol/aztec-packages/issues/3346)) ([91cb369](https://github.com/AztecProtocol/aztec-packages/commit/91cb369aa7ecbf457965f53057cafa2c2e6f1214)) +* New Poseidon2 relations ([#3406](https://github.com/AztecProtocol/aztec-packages/issues/3406)) ([14b9736](https://github.com/AztecProtocol/aztec-packages/commit/14b9736925c6da33133bd24ee283fb4c199082a5)) +* Pull latest noir for brillig optimizations ([#3464](https://github.com/AztecProtocol/aztec-packages/issues/3464)) ([d356bac](https://github.com/AztecProtocol/aztec-packages/commit/d356bac740d203fbb9363a0127ca1d433358e029)) +* Seperate pil files for sub machines ([#3454](https://github.com/AztecProtocol/aztec-packages/issues/3454)) ([d09d6f5](https://github.com/AztecProtocol/aztec-packages/commit/d09d6f5a5f2c7e2a58658a640a6a6d6ba4294701)) + + +### Miscellaneous + +* **avm:** Enable AVM unit tests in CI ([#3463](https://github.com/AztecProtocol/aztec-packages/issues/3463)) ([051dda9](https://github.com/AztecProtocol/aztec-packages/commit/051dda9c50f1d9f11f5063ddf51c9986a6998b43)), closes [#3461](https://github.com/AztecProtocol/aztec-packages/issues/3461) +* **bb:** Pointer_view to reference-based get_all 
([#3495](https://github.com/AztecProtocol/aztec-packages/issues/3495)) ([50d7327](https://github.com/AztecProtocol/aztec-packages/commit/50d73271919306a05ac3a7c2e7d37363b6761248)) +* **bb:** Reuse entities from GoblinUltra in GoblinUltraRecursive ([#3521](https://github.com/AztecProtocol/aztec-packages/issues/3521)) ([8259636](https://github.com/AztecProtocol/aztec-packages/commit/8259636c016c0adecb052f176e78444fb5481c38)) +* Build the acir test vectors as part of CI. ([#3447](https://github.com/AztecProtocol/aztec-packages/issues/3447)) ([1a2d1f8](https://github.com/AztecProtocol/aztec-packages/commit/1a2d1f822d0e1fabd322c2c4d0473629edd56380)) +* Field-agnostic and reusable transcript ([#3433](https://github.com/AztecProtocol/aztec-packages/issues/3433)) ([d78775a](https://github.com/AztecProtocol/aztec-packages/commit/d78775adb9574a3d76c3fca8cf940cdef460ae10)) +* Optimise bb.js package size and sandox/cli dockerfiles to unbloat final containers. ([#3462](https://github.com/AztecProtocol/aztec-packages/issues/3462)) ([cb3db5d](https://github.com/AztecProtocol/aztec-packages/commit/cb3db5d0f1f8912f1a97258e5043eb0f69eff551)) +* Pin node version in docker base images and bump nvmrc ([#3537](https://github.com/AztecProtocol/aztec-packages/issues/3537)) ([5d3895a](https://github.com/AztecProtocol/aztec-packages/commit/5d3895aefb7812eb6bd8017baf43533959ad69b4)) +* Recursive verifier updates ([#3452](https://github.com/AztecProtocol/aztec-packages/issues/3452)) ([dbb4a12](https://github.com/AztecProtocol/aztec-packages/commit/dbb4a1205528bdd8217ea2d15ccf060e2aa9b7d2)) +* Refactor `WitnessEntities` to be able to derive `WitnessCommitments` from it ([#3479](https://github.com/AztecProtocol/aztec-packages/issues/3479)) ([9c9b561](https://github.com/AztecProtocol/aztec-packages/commit/9c9b561f392de5fce11cefe4d72e4f33f2567f41)) +* Transcript handled through shared_ptr ([#3434](https://github.com/AztecProtocol/aztec-packages/issues/3434)) ([30fca33](https://github.com/AztecProtocol/aztec-packages/commit/30fca3307ee7e33d81fd51c3d280c6362baef0b9)) +* Typo fixes ([#3488](https://github.com/AztecProtocol/aztec-packages/issues/3488)) ([d9a44dc](https://github.com/AztecProtocol/aztec-packages/commit/d9a44dc2e655752e1c6503ac85b64169ec7e4754)) + ## [0.16.1](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.0...barretenberg-v0.16.1) (2023-11-28) diff --git a/barretenberg/README.md b/barretenberg/README.md index fd24c4fa039..b87cf333b54 100644 --- a/barretenberg/README.md +++ b/barretenberg/README.md @@ -46,12 +46,13 @@ Ignores proving key construction. - Ninja (used by the presets as the default generator) - clang >= 16 or gcc >= 10 - clang-format +- libstdc++ >= 12 - libomp (if multithreading is required. 
Multithreading can be disabled using the compiler flag `-DMULTITHREADING 0`) To install on Ubuntu, run: ``` -sudo apt-get install cmake clang clang-format ninja-build +sudo apt-get install cmake clang clang-format ninja-build libstdc++-12-dev ``` ### Installing openMP (Linux) diff --git a/barretenberg/acir_tests/Dockerfile.bb b/barretenberg/acir_tests/Dockerfile.bb index 000627d8172..7e211d62eca 100644 --- a/barretenberg/acir_tests/Dockerfile.bb +++ b/barretenberg/acir_tests/Dockerfile.bb @@ -1,7 +1,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-assert FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add git bash curl jq coreutils COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build COPY --from=noir-acir-tests /usr/src/noir/test_programs /usr/src/noir/test_programs diff --git a/barretenberg/acir_tests/Dockerfile.bb.js b/barretenberg/acir_tests/Dockerfile.bb.js index 1b86d6e953d..760b231fc90 100644 --- a/barretenberg/acir_tests/Dockerfile.bb.js +++ b/barretenberg/acir_tests/Dockerfile.bb.js @@ -1,8 +1,8 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/bb.js FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests -FROM node:18 -COPY --from=0 /usr/src/barretenberg/ts /usr/src/barretenberg/ts +FROM node:18.19.0 +COPY --from=0 /usr/src/barretenberg/ts-build /usr/src/barretenberg/ts COPY --from=noir-acir-tests /usr/src/noir/test_programs /usr/src/noir/test_programs RUN apt update && apt install -y lsof jq WORKDIR /usr/src/barretenberg/acir_tests diff --git a/barretenberg/acir_tests/Dockerfile.bb.sol b/barretenberg/acir_tests/Dockerfile.bb.sol index 7840d0c2e4f..40ffa075078 100644 --- a/barretenberg/acir_tests/Dockerfile.bb.sol +++ b/barretenberg/acir_tests/Dockerfile.bb.sol @@ -2,7 +2,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clan FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-sol FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add git bash curl jq COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build COPY --from=1 /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol diff --git a/barretenberg/acir_tests/Dockerfile.noir_acir_tests b/barretenberg/acir_tests/Dockerfile.noir_acir_tests index 87fdd8604a4..5131e94e279 100644 --- a/barretenberg/acir_tests/Dockerfile.noir_acir_tests +++ b/barretenberg/acir_tests/Dockerfile.noir_acir_tests @@ -3,7 +3,7 @@ # So, it lives here. # This chains off the nargo build, and creates a container with a compiled set of acir tests. FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir -RUN apk add bash jq +RUN apt update && apt install -y jq && rm -rf /var/lib/apt/lists/* && apt-get clean ENV PATH="/usr/src/noir/target/release:${PATH}" WORKDIR /usr/src/noir/test_programs COPY . . 
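Before the shell-script cleanups below, a brief aside on the `.aztec-run` wrapper added earlier in this diff: it rewrites `localhost` to `host.docker.internal` in the arguments, and injects the environment variables named in `ENV_VARS_TO_INJECT` into the container, so tools inside Docker can reach services listening on the host. The following is a minimal, Docker-free sketch of that mechanism for experimentation; the variable names mirror those in the scripts above, but the sample values, and the rewriting of variable values rather than just arguments, are illustrative assumptions.

```bash
#!/usr/bin/env bash
# Sketch of .aztec-run's localhost rewriting and env injection (no docker needed).
set -euo pipefail

# Sample values mirroring what aztec-cli exports; purely illustrative.
export ENV_VARS_TO_INJECT="PXE_URL DEBUG"
export PXE_URL="http://localhost:8080"
export DEBUG="aztec:*"

# Rewrite localhost in each argument, as .aztec-run does before `docker run`.
args=("$@")
for i in "${!args[@]}"; do
  args[$i]=${args[$i]//localhost/host.docker.internal}
done

# Build the `-e VAR=value` flags that a real invocation would hand to docker.
DOCKER_ENV=""
for env in ${ENV_VARS_TO_INJECT:-}; do
  value=${!env:-} # indirect lookup of the named variable's value
  DOCKER_ENV+="-e $env=${value//localhost/host.docker.internal} "
done

echo "rewritten args: ${args[@]:-}"
echo "env flags:      $DOCKER_ENV"
```

Running it as `./sketch.sh deploy --rpc-url http://localhost:8545` prints the rewritten argument list and the `-e` flags that would be passed through to the container.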
diff --git a/barretenberg/acir_tests/bash_helpers/catch.sh b/barretenberg/acir_tests/bash_helpers/catch.sh index 888af3cbb44..bc2025d4da5 100644 --- a/barretenberg/acir_tests/bash_helpers/catch.sh +++ b/barretenberg/acir_tests/bash_helpers/catch.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Handler for SIGCHLD, cleanup if child exit with error handle_sigchild() { diff --git a/barretenberg/acir_tests/bench_acir_tests.sh b/barretenberg/acir_tests/bench_acir_tests.sh index b338e82e30b..a41b261d1cc 100755 --- a/barretenberg/acir_tests/bench_acir_tests.sh +++ b/barretenberg/acir_tests/bench_acir_tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash TEST_NAMES=("$@") THREADS=(1 4 16 32 64) diff --git a/barretenberg/acir_tests/clone_test_vectors.sh b/barretenberg/acir_tests/clone_test_vectors.sh index c6fd6ef1bf1..b15814ab013 100755 --- a/barretenberg/acir_tests/clone_test_vectors.sh +++ b/barretenberg/acir_tests/clone_test_vectors.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu TEST_SRC=${TEST_SRC:-../../noir/test_programs/acir_artifacts} diff --git a/barretenberg/acir_tests/gen_inner_proof_inputs.sh b/barretenberg/acir_tests/gen_inner_proof_inputs.sh index 64b87fe19db..36137bde82e 100755 --- a/barretenberg/acir_tests/gen_inner_proof_inputs.sh +++ b/barretenberg/acir_tests/gen_inner_proof_inputs.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Env var overrides: # BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). set -eu diff --git a/barretenberg/acir_tests/run_acir_tests.sh b/barretenberg/acir_tests/run_acir_tests.sh index 84953d01c55..ee28c975113 100755 --- a/barretenberg/acir_tests/run_acir_tests.sh +++ b/barretenberg/acir_tests/run_acir_tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Env var overrides: # BIN: to specify a different binary to test with (e.g. bb.js or bb.js-dev). # VERBOSE: to enable logging for each test. 
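An aside on the recurring shebang change in these hunks (`#!/bin/bash` to `#!/usr/bin/env bash`): `env` resolves `bash` through `PATH`, so the scripts keep working on systems where bash is not installed at `/bin/bash`, e.g. NixOS, or a macOS machine using a newer Homebrew bash. A two-line check of what the env-based shebang would actually run on a given machine:

```bash
#!/usr/bin/env bash
# Print the bash that `env` resolves via PATH, plus its version.
command -v bash
bash --version | head -n1
```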
@@ -80,8 +80,8 @@ else # If parallel flag is set, run in parallel if [ -n "${PARALLEL:-}" ]; then test $TEST_NAME & - else - test $TEST_NAME + else + test $TEST_NAME fi done fi diff --git a/barretenberg/acir_tests/run_acir_tests_browser.sh b/barretenberg/acir_tests/run_acir_tests_browser.sh index 22830656250..1c1f2ce0e08 100755 --- a/barretenberg/acir_tests/run_acir_tests_browser.sh +++ b/barretenberg/acir_tests/run_acir_tests_browser.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -em cleanup() { diff --git a/barretenberg/bootstrap.sh b/barretenberg/bootstrap.sh index 504f1980982..c16fd294b6c 100755 --- a/barretenberg/bootstrap.sh +++ b/barretenberg/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index b5b1422ef34..56ea3977f17 100644 --- a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.16.1 # x-release-please-version + VERSION 0.16.7 # x-release-please-version LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/cpp/bootstrap.sh b/barretenberg/cpp/bootstrap.sh index 25350c4fcce..0b9295bafbd 100755 --- a/barretenberg/cpp/bootstrap.sh +++ b/barretenberg/cpp/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Navigate to script folder diff --git a/barretenberg/cpp/format.sh b/barretenberg/cpp/format.sh index 0bf8bca805c..ae314e96a6f 100755 --- a/barretenberg/cpp/format.sh +++ b/barretenberg/cpp/format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e if [ "$1" == "staged" ]; then diff --git a/barretenberg/cpp/pil/fib/fibonacci.pil b/barretenberg/cpp/pil/fib/fibonacci.pil deleted file mode 100644 index 993a675e7d2..00000000000 --- a/barretenberg/cpp/pil/fib/fibonacci.pil +++ /dev/null @@ -1,18 +0,0 @@ -constant %N = 16; - -// This uses the alternative nomenclature as well. - -namespace Fibonacci(%N); - col fixed LAST(i) { match i { - %N - 1 => 1, - _ => 0, - } }; - col fixed FIRST(i) { match i { - 0 => 1, - _ => 0, - } }; - col witness x, y; - - (1-FIRST) * (1-LAST) * (x' - y) = 0; - (1-FIRST) * (1-LAST) * (y' - (x + y)) = 0; - diff --git a/barretenberg/cpp/pil/fib/fibonacci_opt.pil b/barretenberg/cpp/pil/fib/fibonacci_opt.pil deleted file mode 100644 index 2c36cd15327..00000000000 --- a/barretenberg/cpp/pil/fib/fibonacci_opt.pil +++ /dev/null @@ -1,8 +0,0 @@ -constant %N = 16; -namespace Fibonacci(16); - col fixed LAST(i) { match i { (%N - 1) => 1, _ => 0, } }; - col fixed FIRST(i) { match i { 0 => 1, _ => 0, } }; - col witness x; - col witness y; - (((1 - Fibonacci.FIRST) * (1 - Fibonacci.LAST)) * (Fibonacci.x' - Fibonacci.y)) = 0; - (((1 - Fibonacci.FIRST) * (1 - Fibonacci.LAST)) * (Fibonacci.y' - (Fibonacci.x + Fibonacci.y))) = 0; diff --git a/barretenberg/cpp/scripts/bb-tests.sh b/barretenberg/cpp/scripts/bb-tests.sh index 459eb756593..a3f322b2afa 100755 --- a/barretenberg/cpp/scripts/bb-tests.sh +++ b/barretenberg/cpp/scripts/bb-tests.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script runs all test suites that have not been broken out into their own jobs for parallelisation. # Might be better to list exclusions here rather than inclusions as risky to maintain. 
set -eu diff --git a/barretenberg/cpp/scripts/benchmarks.sh b/barretenberg/cpp/scripts/benchmarks.sh index a7ec1dcb1d0..8a547c29aba 100755 --- a/barretenberg/cpp/scripts/benchmarks.sh +++ b/barretenberg/cpp/scripts/benchmarks.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Move above script dir. diff --git a/barretenberg/cpp/scripts/collect_coverage_information.sh b/barretenberg/cpp/scripts/collect_coverage_information.sh index b018e251483..0a3231a304c 100755 --- a/barretenberg/cpp/scripts/collect_coverage_information.sh +++ b/barretenberg/cpp/scripts/collect_coverage_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash if [ $# -ne 2 ]; then echo "Usage: $0 " diff --git a/barretenberg/cpp/scripts/collect_heap_information.sh b/barretenberg/cpp/scripts/collect_heap_information.sh index 1d25c5a791c..a1b6bee078e 100755 --- a/barretenberg/cpp/scripts/collect_heap_information.sh +++ b/barretenberg/cpp/scripts/collect_heap_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu PRESET=gperftools diff --git a/barretenberg/cpp/scripts/collect_profile_information.sh b/barretenberg/cpp/scripts/collect_profile_information.sh index df932c086bc..ebc0249392e 100755 --- a/barretenberg/cpp/scripts/collect_profile_information.sh +++ b/barretenberg/cpp/scripts/collect_profile_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # can also be 'xray-1thread' diff --git a/barretenberg/cpp/scripts/install-wasi-sdk.sh b/barretenberg/cpp/scripts/install-wasi-sdk.sh index 1da258d880b..9d27d012a61 100755 --- a/barretenberg/cpp/scripts/install-wasi-sdk.sh +++ b/barretenberg/cpp/scripts/install-wasi-sdk.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu if [[ -d ./src/wasi-sdk-20.0 && -d ./src/wasi-sdk-20.0+threads ]]; then diff --git a/barretenberg/cpp/scripts/run_tests b/barretenberg/cpp/scripts/run_tests index 28d9f8cc4f8..a67f03ee992 100755 --- a/barretenberg/cpp/scripts/run_tests +++ b/barretenberg/cpp/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This is the default test runner which takes the as arguments: # 1. The number of ignition transcripts to download. # 2. The set of gtest binary names to run. diff --git a/barretenberg/cpp/src/CMakeLists.txt b/barretenberg/cpp/src/CMakeLists.txt index b09a2181ebc..8f3ee4289c5 100644 --- a/barretenberg/cpp/src/CMakeLists.txt +++ b/barretenberg/cpp/src/CMakeLists.txt @@ -58,8 +58,8 @@ add_subdirectory(barretenberg/eccvm) add_subdirectory(barretenberg/env) add_subdirectory(barretenberg/examples) add_subdirectory(barretenberg/flavor) -add_subdirectory(barretenberg/grumpkin_srs_gen) add_subdirectory(barretenberg/goblin) +add_subdirectory(barretenberg/grumpkin_srs_gen) add_subdirectory(barretenberg/honk) add_subdirectory(barretenberg/join_split_example) add_subdirectory(barretenberg/numeric) diff --git a/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh b/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh index 0ac6dce1157..34ee2ce171d 100755 --- a/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh +++ b/barretenberg/cpp/src/barretenberg/benchmark/compare_branch_vs_baseline.sh @@ -1,7 +1,7 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to compare a suite of benchmarks between baseline (default: master) and -# the branch from which the script is run. Simply check out the branch of interest, ensure +# the branch from which the script is run. 
Simply check out the branch of interest, ensure # it is up to date with local master, and run the script. # Specify the benchmark suite and the "baseline" branch against which to compare @@ -43,7 +43,7 @@ BASELINE_RESULTS="$BENCH_RESULTS_DIR/results_baseline.json" echo -e "\nRunning $BENCH_TARGET in master.." bin/$BENCH_TARGET --benchmark_format=json > $BASELINE_RESULTS -# Call compare.py on the results (json) to get high level statistics. +# Call compare.py on the results (json) to get high level statistics. # See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. $BENCH_TOOLS_DIR/compare.py benchmarks $BASELINE_RESULTS $BRANCH_RESULTS diff --git a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp index 6f223939a4b..0ef9fabfa0a 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp @@ -218,7 +218,7 @@ void construct_proof_with_specified_num_iterations( Composer composer; for (auto _ : state) { - // Constuct circuit and prover; don't include this part in measurement + // Construct circuit and prover; don't include this part in measurement state.PauseTiming(); auto prover = get_prover(composer, test_circuit_function, num_iterations); state.ResumeTiming(); diff --git a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh index 1863327ae4e..f97dbc0eaa7 100755 --- a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh +++ b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/compare_honk_to_plonk_ultra.sh @@ -1,7 +1,7 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to compare the results of honk_bench between baseline (master) and -# the branch from which the script is run. Simply check out the branch of interest, ensure +# the branch from which the script is run. Simply check out the branch of interest, ensure # it is up to date with local master, and run the script. echo -e '\nComparing Ultra Plonk/Honk benchmarks.' @@ -19,21 +19,21 @@ pip3 install --user -r $BUILD_DIR/_deps/benchmark-src/requirements.txt cd $BASE_DIR mkdir $BENCH_RESULTS_DIR -# +# echo -e '\nBuilding and running Standard benchmarks..' # rm -rf $BUILD_DIR -cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_plonk_bench +cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_plonk_bench cd build-bench PLONK_BENCH_RESULTS="$BENCH_RESULTS_DIR/plonk_bench.json" ./bin/ultra_plonk_bench --benchmark_format=json > $PLONK_BENCH_RESULTS cd .. -cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_honk_bench +cmake --preset bench > /dev/null && cmake --build --preset bench --target ultra_honk_bench cd build-bench HONK_BENCH_RESULTS="$BENCH_RESULTS_DIR/honk_bench.json" ./bin/ultra_honk_bench --benchmark_format=json > $HONK_BENCH_RESULTS -# Call compare.py on the results (json) to get high level statistics. +# Call compare.py on the results (json) to get high level statistics. # See docs at https://github.com/google/benchmark/blob/main/docs/tools.md for more details. 
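# (For orientation: compare.py is the standard google-benchmark comparison tool;
# an equivalent manual invocation would look roughly like
#   python3 tools/compare.py benchmarks plonk_bench.json honk_bench.json
# and prints per-benchmark wall-clock and CPU-time deltas. Paths here are illustrative.)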
$BENCH_TOOLS_DIR/compare.py benchmarks $PLONK_BENCH_RESULTS $HONK_BENCH_RESULTS diff --git a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp index e0cfc6e6477..926923d767e 100644 --- a/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp +++ b/barretenberg/cpp/src/barretenberg/commitment_schemes/kzg/kzg.hpp @@ -81,16 +81,15 @@ template class KZG { auto quotient_commitment = verifier_transcript->template receive_from_prover("KZG:W"); GroupElement P_0; - // Note: In the recursive setting, we only add the contribution if it is not the point at infinity (i.e. if the - // evaluation is not equal to zero). if constexpr (Curve::is_stdlib_type) { auto builder = verifier_transcript->builder; auto one = Fr(builder, 1); - std::vector commitments = { claim.commitment, quotient_commitment }; - std::vector scalars = { one, claim.opening_pair.challenge }; + std::vector commitments = { claim.commitment, + quotient_commitment, + GroupElement::one(builder) }; + std::vector scalars = { one, claim.opening_pair.challenge, -claim.opening_pair.evaluation }; P_0 = GroupElement::batch_mul(commitments, scalars); - // Note: This implementation assumes the evaluation is zero (as is the case for shplonk). - ASSERT(claim.opening_pair.evaluation.get_value() == 0); + } else { P_0 = claim.commitment; P_0 += quotient_commitment * claim.opening_pair.challenge; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp index 16d3f3a9cfb..60b5804a47d 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp @@ -2,7 +2,7 @@ #include "barretenberg/commitment_schemes/claim.hpp" #include "barretenberg/commitment_schemes/commitment_key.hpp" #include "barretenberg/common/ref_array.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/honk/proof_system/power_polynomial.hpp" #include "barretenberg/polynomials/polynomial.hpp" @@ -183,7 +183,7 @@ template void ECCVMProver_::execute_log_derivative_ gamma * (gamma + beta_sqr) * (gamma + beta_sqr + beta_sqr) * (gamma + beta_sqr + beta_sqr + beta_sqr); relation_parameters.eccvm_set_permutation_delta = relation_parameters.eccvm_set_permutation_delta.invert(); // Compute inverse polynomial for our logarithmic-derivative lookup method - lookup_library::compute_logderivative_inverse( + logderivative_library::compute_logderivative_inverse( prover_polynomials, relation_parameters, key->circuit_size); transcript->send_to_verifier(commitment_labels.lookup_inverses, commitment_key->commit(key->lookup_inverses)); prover_polynomials.lookup_inverses = key->lookup_inverses; @@ -345,31 +345,34 @@ template void ECCVMProver_::execute_transcript_cons transcript->send_to_verifier("Translation:hack_evaluation", hack.evaluate(evaluation_challenge_x)); // Get another challenge for batching the univariate claims - FF batching_challenge = transcript->get_challenge("Translation:batching_challenge"); + FF ipa_batching_challenge = transcript->get_challenge("Translation:ipa_batching_challenge"); // Collect the polynomials and evaluations to be batched RefArray univariate_polynomials{ key->transcript_op, key->transcript_Px, key->transcript_Py, key->transcript_z1, key->transcript_z2, hack }; std::array 
univariate_evaluations; - // Constuct the batched polynomial and batched evaluation + // Construct the batched polynomial and batched evaluation Polynomial batched_univariate{ key->circuit_size }; FF batched_evaluation{ 0 }; auto batching_scalar = FF(1); for (auto [polynomial, eval] : zip_view(univariate_polynomials, univariate_evaluations)) { batched_univariate.add_scaled(polynomial, batching_scalar); batched_evaluation += eval * batching_scalar; - batching_scalar *= batching_challenge; + batching_scalar *= ipa_batching_challenge; } // Compute a proof for the batched univariate opening PCS::compute_opening_proof( commitment_key, { evaluation_challenge_x, batched_evaluation }, batched_univariate, transcript); + + // Get another challenge for batching the univariate claims + translation_batching_challenge_v = transcript->get_challenge("Translation:batching_challenge"); } template plonk::proof& ECCVMProver_::export_proof() { - proof.proof_data = transcript->proof_data; + proof.proof_data = transcript->export_proof(); return proof; } diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp index 39b42b89688..af6f4ec457f 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp @@ -69,6 +69,7 @@ template class ECCVMProver_ { Polynomial quotient_W; FF evaluation_challenge_x; + FF translation_batching_challenge_v; // to be rederived by the translator verifier sumcheck::SumcheckOutput sumcheck_output; pcs::gemini::ProverOutput gemini_output; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp index d7f26ce7299..76058a9c800 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp @@ -253,16 +253,17 @@ template bool ECCVMVerifier_::verify_proof(const plonk transcript->template receive_from_prover("Translation:hack_evaluation") }; - FF batching_challenge = transcript->get_challenge("Translation:batching_challenge"); + // Get another challenge for batching the univariate claims + FF ipa_batching_challenge = transcript->get_challenge("Translation:ipa_batching_challenge"); - // Constuct batched commitment and batched evaluation + // Construct batched commitment and batched evaluation auto batched_commitment = transcript_commitments[0]; auto batched_transcript_eval = transcript_evaluations[0]; - auto batching_scalar = batching_challenge; + auto batching_scalar = ipa_batching_challenge; for (size_t idx = 1; idx < transcript_commitments.size(); ++idx) { batched_commitment = batched_commitment + transcript_commitments[idx] * batching_scalar; batched_transcript_eval += batching_scalar * transcript_evaluations[idx]; - batching_scalar *= batching_challenge; + batching_scalar *= ipa_batching_challenge; } // Construct and verify batched opening claim diff --git a/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt index 19dc8bec7b1..f2c41054b03 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(flavor commitment_schemes ecc polynomials proof_system) \ No newline at end of file +barretenberg_module(flavor commitment_schemes ecc polynomials proof_system) diff --git a/barretenberg/cpp/src/barretenberg/flavor/generated/Fib_flavor.hpp 
b/barretenberg/cpp/src/barretenberg/flavor/generated/Fib_flavor.hpp deleted file mode 100644 index 9572fedd3ff..00000000000 --- a/barretenberg/cpp/src/barretenberg/flavor/generated/Fib_flavor.hpp +++ /dev/null @@ -1,296 +0,0 @@ - - -#pragma once -#include "../relation_definitions_fwd.hpp" -#include "barretenberg/commitment_schemes/kzg/kzg.hpp" -#include "barretenberg/ecc/curves/bn254/g1.hpp" -#include "barretenberg/polynomials/barycentric.hpp" -#include "barretenberg/polynomials/univariate.hpp" - -#include "barretenberg/flavor/flavor.hpp" -#include "barretenberg/flavor/flavor_macros.hpp" -#include "barretenberg/polynomials/evaluation_domain.hpp" -#include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/relations/generated/Fib.hpp" -#include "barretenberg/transcript/transcript.hpp" - -namespace proof_system::honk { -namespace flavor { - -class FibFlavor { - public: - using Curve = curve::BN254; - using G1 = Curve::Group; - using PCS = pcs::kzg::KZG; - - using FF = G1::subgroup_field; - using Polynomial = barretenberg::Polynomial; - using PolynomialHandle = std::span; - using GroupElement = G1::element; - using Commitment = G1::affine_element; - using CommitmentHandle = G1::affine_element; - using CommitmentKey = pcs::CommitmentKey; - using VerifierCommitmentKey = pcs::VerifierCommitmentKey; - - static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 2; - static constexpr size_t NUM_WITNESS_ENTITIES = 2; - static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; - // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for - // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 6; - - using Relations = std::tuple>; - - static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); - - // BATCHED_RELATION_PARTIAL_LENGTH = algebraic degree of sumcheck relation *after* multiplying by the `pow_zeta` - // random polynomial e.g. 
For \sum(x) [A(x) * B(x) + C(x)] * PowZeta(X), relation length = 2 and random relation - // length = 3 - static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1; - static constexpr size_t NUM_RELATIONS = std::tuple_size::value; - - template - using ProtogalaxyTupleOfTuplesOfUnivariates = - decltype(create_protogalaxy_tuple_of_tuples_of_univariates()); - using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); - using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); - - static constexpr bool has_zero_row = true; - - private: - template class PrecomputedEntities : public PrecomputedEntitiesBase { - public: - using DataType = DataType_; - DEFINE_FLAVOR_MEMBERS(DataType, Fibonacci_LAST, Fibonacci_FIRST) - - RefVector get_selectors() - { - return { - Fibonacci_LAST, - Fibonacci_FIRST, - }; - }; - - RefVector get_sigma_polynomials() { return {}; }; - RefVector get_id_polynomials() { return {}; }; - RefVector get_table_polynomials() { return {}; }; - }; - - template class WitnessEntities { - public: - DEFINE_FLAVOR_MEMBERS(DataType, Fibonacci_x, Fibonacci_y) - - RefVector get_wires() - { - return { - Fibonacci_x, - Fibonacci_y, - - }; - }; - - RefVector get_sorted_polynomials() { return {}; }; - }; - - template class AllEntities { - public: - DEFINE_FLAVOR_MEMBERS( - DataType, Fibonacci_LAST, Fibonacci_FIRST, Fibonacci_x, Fibonacci_y, Fibonacci_x_shift, Fibonacci_y_shift) - - RefVector get_wires() - { - return { - Fibonacci_LAST, Fibonacci_FIRST, Fibonacci_x, Fibonacci_y, Fibonacci_x_shift, Fibonacci_y_shift, - - }; - }; - - RefVector get_unshifted() - { - return { - Fibonacci_LAST, - Fibonacci_FIRST, - Fibonacci_x, - Fibonacci_y, - - }; - }; - - RefVector get_to_be_shifted() - { - return { - Fibonacci_x, - Fibonacci_y, - - }; - }; - - RefVector get_shifted() - { - return { - Fibonacci_x_shift, - Fibonacci_y_shift, - - }; - }; - }; - - public: - class ProvingKey : public ProvingKey_, WitnessEntities> { - public: - // Expose constructors on the base class - using Base = ProvingKey_, WitnessEntities>; - using Base::Base; - - // The plookup wires that store plookup read data. - std::array get_table_column_wires() { return {}; }; - }; - - using VerificationKey = VerificationKey_>; - - using ProverPolynomials = AllEntities; - - using FoldedPolynomials = AllEntities>; - - class AllValues : public AllEntities { - public: - using Base = AllEntities; - using Base::Base; - }; - - class AllPolynomials : public AllEntities { - public: - [[nodiscard]] size_t get_polynomial_size() const { return this->Fibonacci_LAST.size(); } - [[nodiscard]] AllValues get_row(const size_t row_idx) const - { - AllValues result; - for (auto [result_field, polynomial] : zip_view(result.get_all(), get_all())) { - result_field = polynomial[row_idx]; - } - return result; - } - }; - - using RowPolynomials = AllEntities; - - class PartiallyEvaluatedMultivariates : public AllEntities { - public: - PartiallyEvaluatedMultivariates() = default; - PartiallyEvaluatedMultivariates(const size_t circuit_size) - { - // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2) - for (auto& poly : get_all()) { - poly = Polynomial(circuit_size / 2); - } - } - }; - - /** - * @brief A container for univariates used during Protogalaxy folding and sumcheck. - * @details During folding and sumcheck, the prover evaluates the relations on these univariates. 
- */ - template using ProverUnivariates = AllEntities>; - - /** - * @brief A container for univariates produced during the hot loop in sumcheck. - */ - using ExtendedEdges = ProverUnivariates; - - class CommitmentLabels : public AllEntities { - private: - using Base = AllEntities; - - public: - CommitmentLabels() - : AllEntities() - { - Base::Fibonacci_LAST = "Fibonacci_LAST"; - Base::Fibonacci_FIRST = "Fibonacci_FIRST"; - Base::Fibonacci_x = "Fibonacci_x"; - Base::Fibonacci_y = "Fibonacci_y"; - }; - }; - - class VerifierCommitments : public AllEntities { - private: - using Base = AllEntities; - - public: - VerifierCommitments(const std::shared_ptr& verification_key) - { - Fibonacci_LAST = verification_key->Fibonacci_LAST; - Fibonacci_FIRST = verification_key->Fibonacci_FIRST; - } - }; - - class Transcript : public BaseTranscript { - public: - uint32_t circuit_size; - - Commitment Fibonacci_x; - Commitment Fibonacci_y; - - std::vector> sumcheck_univariates; - std::array sumcheck_evaluations; - std::vector zm_cq_comms; - Commitment zm_cq_comm; - Commitment zm_pi_comm; - - Transcript() = default; - - Transcript(const std::vector& proof) - : BaseTranscript(proof) - {} - - void deserialize_full_transcript() - { - size_t num_bytes_read = 0; - circuit_size = deserialize_from_buffer(proof_data, num_bytes_read); - size_t log_n = numeric::get_msb(circuit_size); - - Fibonacci_x = deserialize_from_buffer(BaseTranscript::proof_data, num_bytes_read); - Fibonacci_y = deserialize_from_buffer(BaseTranscript::proof_data, num_bytes_read); - - for (size_t i = 0; i < log_n; ++i) { - sumcheck_univariates.emplace_back( - deserialize_from_buffer>( - BaseTranscript::proof_data, num_bytes_read)); - } - sumcheck_evaluations = - deserialize_from_buffer>(BaseTranscript::proof_data, num_bytes_read); - for (size_t i = 0; i < log_n; ++i) { - zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_bytes_read)); - } - zm_cq_comm = deserialize_from_buffer(proof_data, num_bytes_read); - zm_pi_comm = deserialize_from_buffer(proof_data, num_bytes_read); - } - - void serialize_full_transcript() - { - size_t old_proof_length = proof_data.size(); - BaseTranscript::proof_data.clear(); - size_t log_n = numeric::get_msb(circuit_size); - - serialize_to_buffer(circuit_size, BaseTranscript::proof_data); - - serialize_to_buffer(Fibonacci_x, BaseTranscript::proof_data); - serialize_to_buffer(Fibonacci_y, BaseTranscript::proof_data); - - for (size_t i = 0; i < log_n; ++i) { - serialize_to_buffer(sumcheck_univariates[i], BaseTranscript::proof_data); - } - serialize_to_buffer(sumcheck_evaluations, BaseTranscript::proof_data); - for (size_t i = 0; i < log_n; ++i) { - serialize_to_buffer(zm_cq_comms[i], proof_data); - } - serialize_to_buffer(zm_cq_comm, proof_data); - serialize_to_buffer(zm_pi_comm, proof_data); - - // sanity check to make sure we generate the same length of proof as before. - ASSERT(proof_data.size() == old_proof_length); - } - }; -}; - -} // namespace flavor -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp index 80d1ca652e9..ce322761ccb 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp @@ -194,11 +194,6 @@ class GoblinUltra { { return { this->ecc_op_wire_1, this->ecc_op_wire_2, this->ecc_op_wire_3, this->ecc_op_wire_4 }; } - // The sorted concatenations of table and witness data needed for plookup. 
- RefVector get_sorted_polynomials() - { - return { this->sorted_1, this->sorted_2, this->sorted_3, this->sorted_4 }; - }; }; template class ShiftedEntities { @@ -290,11 +285,60 @@ class GoblinUltra { this->calldata_read_counts, this->lookup_inverses }; }; + + RefVector get_witness() + { + return { this->w_l, + this->w_r, + this->w_o, + this->w_4, + this->sorted_accum, + this->z_perm, + this->z_lookup, + this->ecc_op_wire_1, + this->ecc_op_wire_2, + this->ecc_op_wire_3, + this->ecc_op_wire_4, + this->calldata, + this->calldata_read_counts, + this->lookup_inverses }; + }; RefVector get_to_be_shifted() { return { this->table_1, this->table_2, this->table_3, this->table_4, this->w_l, this->w_r, this->w_o, this->w_4, this->sorted_accum, this->z_perm, this->z_lookup }; }; + RefVector get_precomputed() + { + return { this->q_m, + this->q_c, + this->q_l, + this->q_r, + this->q_o, + this->q_4, + this->q_arith, + this->q_sort, + this->q_elliptic, + this->q_aux, + this->q_lookup, + this->q_busread, + this->sigma_1, + this->sigma_2, + this->sigma_3, + this->sigma_4, + this->id_1, + this->id_2, + this->id_3, + this->id_4, + this->table_1, + this->table_2, + this->table_3, + this->table_4, + this->lagrange_first, + this->lagrange_last, + this->lagrange_ecc_op, + this->databus_id }; + } RefVector get_shifted() { return ShiftedEntities::get_all(); }; }; @@ -381,6 +425,29 @@ class GoblinUltra { } }; + /** + * @brief An owning container of polynomials. + * @warning When this was introduced it broke some of our design principles. + * - Execution trace builders don't handle "polynomials" because the interpretation of the execution trace + * columns as polynomials is a detail of the proving system, and trace builders are (sometimes in practice, + * always in principle) reusable for different proving protocols (e.g., Plonk and Honk). + * - Polynomial storage is handled by key classes. Polynomials aren't moved, but are accessed elsewhere by + * std::spans. + * + * We will consider revising this data model: TODO(https://github.com/AztecProtocol/barretenberg/issues/743) + */ + class AllPolynomials : public AllEntities { + public: + [[nodiscard]] AllValues get_row(const size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + /** * @brief A container for the witness commitments. 
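+ * (One commitment per witness entity, produced by the prover and received by the verifier.)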
*/ @@ -488,7 +555,7 @@ class GoblinUltra {
 using VerifierCommitments = VerifierCommitments_;
 class FoldingParameters {
 public:
- std::vector gate_separation_challenges;
+ std::vector gate_challenges;
 FF target_sum;
 };
diff --git a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp
index 79b573b02e1..fb6e0751361 100644
--- a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp
+++ b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp
@@ -107,7 +107,7 @@ template class GoblinUltraRecursive_ {
 * @param builder
 * @param native_key Native verification key from which to extract the precomputed commitments
 */
- VerificationKey(CircuitBuilder* builder, std::shared_ptr native_key)
+ VerificationKey(CircuitBuilder* builder, const std::shared_ptr& native_key)
 : VerificationKey_>(native_key->circuit_size, native_key->num_public_inputs)
 {
diff --git a/barretenberg/cpp/src/barretenberg/flavor/toy_avm.hpp b/barretenberg/cpp/src/barretenberg/flavor/toy_avm.hpp
new file mode 100644
index 00000000000..ba0c7c2b465
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/flavor/toy_avm.hpp
@@ -0,0 +1,376 @@
+#pragma once
+#include "barretenberg/commitment_schemes/commitment_key.hpp"
+#include "barretenberg/commitment_schemes/kzg/kzg.hpp"
+#include "barretenberg/ecc/curves/bn254/bn254.hpp"
+#include "barretenberg/flavor/flavor.hpp"
+#include "barretenberg/flavor/flavor_macros.hpp"
+#include "barretenberg/polynomials/univariate.hpp"
+#include "barretenberg/relations/relation_parameters.hpp"
+#include "barretenberg/relations/relation_types.hpp"
+#include "barretenberg/relations/toy_avm/generic_permutation_relation.hpp"
+#include "barretenberg/relations/toy_avm/relation_definer.hpp"
+#include "relation_definitions_fwd.hpp"
+#include
+#include
+#include
+#include
+#include
+#include
+
+// NOLINTBEGIN(cppcoreguidelines-avoid-const-or-ref-data-members)
+
+namespace proof_system::honk {
+namespace flavor {
+
+/**
+ * @brief This class provides an example flavor for using GenericPermutationRelations with various settings to make
+ * integrating those mechanisms into AVM easier.
+ *
+ */
+class ToyAVM {
+ public:
+ using Curve = curve::BN254;
+ using FF = Curve::ScalarField;
+ using GroupElement = Curve::Element;
+ using Commitment = Curve::AffineElement;
+ using CommitmentHandle = Curve::AffineElement;
+ using PCS = pcs::kzg::KZG;
+ using Polynomial = barretenberg::Polynomial;
+ using PolynomialHandle = std::span;
+ using CommitmentKey = pcs::CommitmentKey;
+ using VerifierCommitmentKey = pcs::VerifierCommitmentKey;
+
+ // The number of wires is 5. The set of tuples (permutation_set_column_1, permutation_set_column_2) should be
+ // equivalent to (permutation_set_column_3, permutation_set_column_4) and the self_permutation_column contains 2
+ // subsets which are permutations of each other
+ static constexpr size_t NUM_WIRES = 5;
+
+ // The number of multivariate polynomials on which a sumcheck prover operates (including shifts). We often
+ // need containers of this size to hold related data, so we choose a name more agnostic than `NUM_POLYNOMIALS`.
+ // Note: this number does not include the individual sorted list polynomials.
+ static constexpr size_t NUM_ALL_ENTITIES = 12;
+ // The number of polynomials precomputed to describe a circuit and to aid a prover in constructing a satisfying
+ // assignment of witnesses. We again choose a neutral name.
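+ // (Bookkeeping: the 12 NUM_ALL_ENTITIES above decompose exactly into the 5
+ // precomputed and 7 witness entities below; this example flavor has no shifted entities.)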
+ static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 5;
+ // The total number of witness entities not including shifts.
+ static constexpr size_t NUM_WITNESS_ENTITIES = 7;
+
+ // define the tuple of Relations that comprise the Sumcheck relation
+ using Relations = std::tuple>;
+
+ static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length();
+
+ // BATCHED_RELATION_PARTIAL_LENGTH = algebraic degree of sumcheck relation *after* multiplying by the `pow_zeta`
+ // random polynomial e.g. For \sum(x) [A(x) * B(x) + C(x)] * PowZeta(X), relation length = 2 and random relation
+ // length = 3
+ static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1;
+ static constexpr size_t NUM_RELATIONS = std::tuple_size::value;
+
+ // Instantiate the BarycentricData needed to extend each Relation Univariate
+
+ // define the containers for storing the contributions from each relation in Sumcheck
+ using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates());
+ using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values());
+
+ private:
+ /**
+ * @brief A base class labelling precomputed entities and (ordered) subsets of interest.
+ * @details Used to build the proving key and verification key.
+ */
+ template class PrecomputedEntities : public PrecomputedEntitiesBase {
+ public:
+ using DataType = DataType_;
+ DEFINE_FLAVOR_MEMBERS(DataType,
+ lagrange_first, // column 0
+ enable_tuple_set_permutation, // column 1
+ enable_single_column_permutation, // column 2
+ enable_first_set_permutation, // column 3
+ enable_second_set_permutation) // column 4
+
+ RefVector get_selectors()
+ {
+ return { lagrange_first,
+ enable_tuple_set_permutation,
+ enable_single_column_permutation,
+ enable_first_set_permutation,
+ enable_second_set_permutation };
+ };
+ RefVector get_sigma_polynomials() { return {}; };
+ RefVector get_id_polynomials() { return {}; };
+ RefVector get_table_polynomials() { return {}; };
+ };
+
+ /**
+ * @brief Container for all witness polynomials used/constructed by the prover.
+ * @details Shifts are not included here since they do not occupy their own memory.
+ */
+
+ template class WitnessEntities {
+ public:
+ DEFINE_FLAVOR_MEMBERS(DataType,
+ permutation_set_column_1, // Column 0
+ permutation_set_column_2, // Column 1
+ permutation_set_column_3, // Column 2
+ permutation_set_column_4, // Column 3
+ self_permutation_column, // Column 4
+ tuple_permutation_inverses, // Column 5
+ single_permutation_inverses) // Column 6
+
+ RefVector get_wires()
+ {
+ return { permutation_set_column_1,
+ permutation_set_column_2,
+ permutation_set_column_3,
+ permutation_set_column_4,
+ self_permutation_column };
+ };
+ };
+
+ /**
+ * @brief A base class labelling all entities (for instance, all of the polynomials used by the prover during
+ * sumcheck) in this Honk variant along with particular subsets of interest
+ * @details Used to build containers for: the prover's polynomials during sumcheck; the sumcheck's folded
+ * polynomials; the univariates constructed during sumcheck; the evaluations produced by sumcheck.
+ *
+ * Symbolically we have: AllEntities = PrecomputedEntities + WitnessEntities + "ShiftedEntities". It could be
+ * implemented as such, but we have this now.
+ */
+
+ template class AllEntities {
+ public:
+ DEFINE_FLAVOR_MEMBERS(DataType,
+ lagrange_first, // Column 0
+ enable_tuple_set_permutation, // Column 1
+ enable_single_column_permutation, // Column 2
+ enable_first_set_permutation, // Column 3
+ enable_second_set_permutation, // Column 4
+ permutation_set_column_1, // Column 5
+ permutation_set_column_2, // Column 6
+ permutation_set_column_3, // Column 7
+ permutation_set_column_4, // Column 8
+ self_permutation_column, // Column 9
+ tuple_permutation_inverses, // Column 10
+ single_permutation_inverses) // Column 11
+
+ RefVector get_wires()
+ {
+ return {
+ permutation_set_column_1, permutation_set_column_2, permutation_set_column_3, permutation_set_column_4
+ };
+ };
+ RefVector get_unshifted()
+ {
+ return { lagrange_first,
+ enable_tuple_set_permutation,
+ enable_single_column_permutation,
+ enable_first_set_permutation,
+ enable_second_set_permutation,
+ permutation_set_column_1,
+ permutation_set_column_2,
+ permutation_set_column_3,
+ permutation_set_column_4,
+ self_permutation_column,
+ tuple_permutation_inverses,
+ single_permutation_inverses };
+ };
+ RefVector get_to_be_shifted() { return {}; };
+ RefVector get_shifted() { return {}; };
+ };
+
+ public:
+ /**
+ * @brief The proving key is responsible for storing the polynomials used by the prover.
+ * @note TODO(Cody): Maybe multiple inheritance is the right thing here. In that case, nothing should ever inherit
+ * from ProvingKey.
+ */
+ class ProvingKey : public ProvingKey_, WitnessEntities> {
+ public:
+ // Expose constructors on the base class
+ using Base = ProvingKey_, WitnessEntities>;
+ using Base::Base;
+
+ // The plookup wires that store plookup read data.
+ std::array get_table_column_wires() { return {}; };
+ };
+
+ /**
+ * @brief The verification key is responsible for storing the commitments to the precomputed (non-witness)
+ * polynomials used by the verifier.
+ *
+ * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to resolve
+ * that, and split out separate PrecomputedPolynomials/Commitments data for clarity but also for portability of our
+ * circuits.
+ */
+ using VerificationKey = VerificationKey_>;
+
+ /**
+ * @brief A field element for each entity of the flavor. These entities represent the prover polynomials evaluated
+ * at one point.
+ */
+ class AllValues : public AllEntities {
+ public:
+ using Base = AllEntities;
+ using Base::Base;
+ };
+
+ /**
+ * @brief An owning container of polynomials.
+ * @warning When this was introduced it broke some of our design principles.
+ * - Execution trace builders don't handle "polynomials" because the interpretation of the execution trace
+ * columns as polynomials is a detail of the proving system, and trace builders are (sometimes in practice,
+ * always in principle) reusable for different proving protocols (e.g., Plonk and Honk).
+ * - Polynomial storage is handled by key classes. Polynomials aren't moved, but are accessed elsewhere by
+ * std::spans.
+ *
+ * We will consider revising this data model: TODO(https://github.com/AztecProtocol/barretenberg/issues/743)
+ */
+ class AllPolynomials : public AllEntities {
+ public:
+ [[nodiscard]] size_t get_polynomial_size() const { return this->lagrange_first.size(); }
+ AllValues get_row(const size_t row_idx) const
+ {
+ AllValues result;
+ for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) {
+ result_field = polynomial[row_idx];
+ }
+ return result;
+ }
+ };
+ /**
+ * @brief A container for polynomial handles; only stores spans.
+ */
+ class ProverPolynomials : public AllEntities {
+ public:
+ [[nodiscard]] size_t get_polynomial_size() const { return enable_tuple_set_permutation.size(); }
+ [[nodiscard]] AllValues get_row(const size_t row_idx) const
+ {
+ AllValues result;
+ for (auto [result_field, polynomial] : zip_view(result.get_all(), get_all())) {
+ result_field = polynomial[row_idx];
+ }
+ return result;
+ }
+ };
+
+ /**
+ * @brief A container for storing the partially evaluated multivariates produced by sumcheck.
+ */
+ class PartiallyEvaluatedMultivariates : public AllEntities {
+
+ public:
+ PartiallyEvaluatedMultivariates() = default;
+ PartiallyEvaluatedMultivariates(const size_t circuit_size)
+ {
+ // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2)
+ for (auto& poly : this->get_all()) {
+ poly = Polynomial(circuit_size / 2);
+ }
+ }
+ };
+ /**
+ * @brief A container for univariates used during Protogalaxy folding and sumcheck.
+ * @details During folding and sumcheck, the prover evaluates the relations on these univariates.
+ */
+ template using ProverUnivariates = AllEntities>;
+
+ /**
+ * @brief A container for univariates produced during the hot loop in sumcheck.
+ */
+ using ExtendedEdges = ProverUnivariates;
+
+ /**
+ * @brief A container for the witness commitments.
+ */
+
+ using WitnessCommitments = WitnessEntities;
+
+ /**
+ * @brief A container for commitment labels.
+ * @note It's debatable whether this should inherit from AllEntities, since most entries are not strictly needed. It
+ * has, however, been useful during debugging to have these labels available.
+ * + */ + class CommitmentLabels : public AllEntities { + private: + using Base = AllEntities; + + public: + CommitmentLabels() + : AllEntities() + { + Base::permutation_set_column_1 = "PERMUTATION_SET_COLUMN_1"; + Base::permutation_set_column_2 = "PERMUTATION_SET_COLUMN_2"; + Base::permutation_set_column_3 = "PERMUTATION_SET_COLUMN_3"; + Base::permutation_set_column_4 = "PERMUTATION_SET_COLUMN_4"; + Base::self_permutation_column = "SELF_PERMUTATION_COLUMN"; + Base::tuple_permutation_inverses = "TUPLE_PERMUTATION_INVERSES"; + Base::single_permutation_inverses = "SINGLE_PERMUTATION_INVERSES"; + // The ones beginning with "__" are only used for debugging + Base::lagrange_first = "__LAGRANGE_FIRST"; + Base::enable_tuple_set_permutation = "__ENABLE_SET_PERMUTATION"; + Base::enable_single_column_permutation = "__ENABLE_SINGLE_COLUMN_PERMUTATION"; + Base::enable_first_set_permutation = "__ENABLE_FIRST_SET_PERMUTATION"; + Base::enable_second_set_permutation = "__ENABLE_SECOND_SET_PERMUTATION"; + }; + }; + + class VerifierCommitments : public AllEntities { + + public: + VerifierCommitments(const std::shared_ptr& verification_key) + { + lagrange_first = verification_key->lagrange_first; + enable_tuple_set_permutation = verification_key->enable_tuple_set_permutation; + enable_single_column_permutation = verification_key->enable_single_column_permutation; + enable_first_set_permutation = verification_key->enable_first_set_permutation; + enable_second_set_permutation = verification_key->enable_second_set_permutation; + } + }; + + /** + * @brief Derived class that defines proof structure for ECCVM proofs, as well as supporting functions. + * + */ + class Transcript : public BaseTranscript { + public: + uint32_t circuit_size; + Commitment column_0_comm; + Commitment column_1_comm; + Commitment permutation_inverses_comm; + std::vector> sumcheck_univariates; + std::array sumcheck_evaluations; + + std::vector zm_cq_comms; + Commitment zm_cq_comm; + Commitment zm_pi_comm; + + Transcript() = default; + + Transcript(const std::vector& proof) + : BaseTranscript(proof) + {} + + void deserialize_full_transcript() + { + // TODO. Codepath is dead for now, because there is no composer + abort(); + // take current proof and put them into the struct + } + + void serialize_full_transcript() + { + // TODO. 
Codepath is dead for now, because there is no composer + abort(); + } + }; +}; + +// NOLINTEND(cppcoreguidelines-avoid-const-or-ref-data-members) + +} // namespace flavor +namespace sumcheck { + +DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(GenericPermutationRelationImpl, flavor::ToyAVM) + +} // namespace sumcheck +} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp b/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp index 065a70aed2f..450571eaaba 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/ultra.hpp @@ -207,6 +207,18 @@ class Ultra { }; }; + + RefVector get_precomputed() + { + return { q_m, q_c, q_l, q_r, q_o, q_4, q_arith, q_sort, + q_elliptic, q_aux, q_lookup, sigma_1, sigma_2, sigma_3, sigma_4, id_1, + id_2, id_3, id_4, table_1, table_2, table_3, table_4, lagrange_first, + lagrange_last + + }; + } + + RefVector get_witness() { return { w_l, w_r, w_o, w_4, sorted_accum, z_perm, z_lookup }; }; RefVector get_to_be_shifted() { return { table_1, table_2, table_3, table_4, w_l, w_r, w_o, w_4, sorted_accum, z_perm, z_lookup }; @@ -248,8 +260,8 @@ class Ultra { using VerificationKey = VerificationKey_>; /** - * @brief A field element for each entity of the flavor. These entities represent the prover polynomials evaluated - * at one point. + * @brief A field element for each entity of the flavor. These entities represent the prover polynomials + * evaluated at one point. */ class AllValues : public AllEntities { public: @@ -273,6 +285,29 @@ class Ultra { } }; + /** + * @brief An owning container of polynomials. + * @warning When this was introduced it broke some of our design principles. + * - Execution trace builders don't handle "polynomials" because the interpretation of the execution trace + * columns as polynomials is a detail of the proving system, and trace builders are (sometimes in practice, + * always in principle) reusable for different proving protocols (e.g., Plonk and Honk). + * - Polynomial storage is handled by key classes. Polynomials aren't moved, but are accessed elsewhere by + * std::spans. + * + * We will consider revising this data model: TODO(https://github.com/AztecProtocol/barretenberg/issues/743) + */ + class AllPolynomials : public AllEntities { + public: + [[nodiscard]] AllValues get_row(const size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + /** * @brief A container for storing the partially evaluated multivariates produced by sumcheck. 
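 * (After the first sumcheck round each multivariate has been partially evaluated in one variable,
 * so only circuit_size / 2 coefficients per polynomial remain to be stored.)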
*/ @@ -323,32 +358,31 @@ class Ultra { z_lookup = "Z_LOOKUP"; sorted_accum = "SORTED_ACCUM"; - // The ones beginning with "__" are only used for debugging - q_c = "__Q_C"; - q_l = "__Q_L"; - q_r = "__Q_R"; - q_o = "__Q_O"; - q_4 = "__Q_4"; - q_m = "__Q_M"; - q_arith = "__Q_ARITH"; - q_sort = "__Q_SORT"; - q_elliptic = "__Q_ELLIPTIC"; - q_aux = "__Q_AUX"; - q_lookup = "__Q_LOOKUP"; - sigma_1 = "__SIGMA_1"; - sigma_2 = "__SIGMA_2"; - sigma_3 = "__SIGMA_3"; - sigma_4 = "__SIGMA_4"; - id_1 = "__ID_1"; - id_2 = "__ID_2"; - id_3 = "__ID_3"; - id_4 = "__ID_4"; - table_1 = "__TABLE_1"; - table_2 = "__TABLE_2"; - table_3 = "__TABLE_3"; - table_4 = "__TABLE_4"; - lagrange_first = "__LAGRANGE_FIRST"; - lagrange_last = "__LAGRANGE_LAST"; + q_c = "Q_C"; + q_l = "Q_L"; + q_r = "Q_R"; + q_o = "Q_O"; + q_4 = "Q_4"; + q_m = "Q_M"; + q_arith = "Q_ARITH"; + q_sort = "Q_SORT"; + q_elliptic = "Q_ELLIPTIC"; + q_aux = "Q_AUX"; + q_lookup = "Q_LOOKUP"; + sigma_1 = "SIGMA_1"; + sigma_2 = "SIGMA_2"; + sigma_3 = "SIGMA_3"; + sigma_4 = "SIGMA_4"; + id_1 = "ID_1"; + id_2 = "ID_2"; + id_3 = "ID_3"; + id_4 = "ID_4"; + table_1 = "TABLE_1"; + table_2 = "TABLE_2"; + table_3 = "TABLE_3"; + table_4 = "TABLE_4"; + lagrange_first = "LAGRANGE_FIRST"; + lagrange_last = "LAGRANGE_LAST"; }; }; @@ -357,11 +391,11 @@ class Ultra { VerifierCommitments(const std::shared_ptr& verification_key) { q_m = verification_key->q_m; + q_c = verification_key->q_c; q_l = verification_key->q_l; q_r = verification_key->q_r; q_o = verification_key->q_o; q_4 = verification_key->q_4; - q_c = verification_key->q_c; q_arith = verification_key->q_arith; q_sort = verification_key->q_sort; q_elliptic = verification_key->q_elliptic; @@ -386,7 +420,7 @@ class Ultra { class FoldingParameters { public: - std::vector gate_separation_challenges; + std::vector gate_challenges; FF target_sum; }; diff --git a/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp b/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp index 5f068a5c2d4..df0ac96c120 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp @@ -251,7 +251,7 @@ template class UltraRecursive_ { * @param builder * @param native_key Native verification key from which to extract the precomputed commitments */ - VerificationKey(CircuitBuilder* builder, std::shared_ptr native_key) + VerificationKey(CircuitBuilder* builder, const std::shared_ptr& native_key) : VerificationKey_>(native_key->circuit_size, native_key->num_public_inputs) { this->q_m = Commitment::from_witness(builder, native_key->q_m); diff --git a/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt index 248b05c02e6..adaa9814aed 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(goblin ultra_honk eccvm translator_vm) \ No newline at end of file +barretenberg_module(goblin stdlib_recursion ultra_honk eccvm translator_vm) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp b/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp deleted file mode 100644 index 09563987010..00000000000 --- a/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp +++ /dev/null @@ -1,196 +0,0 @@ -#include "barretenberg/eccvm/eccvm_composer.hpp" -#include "barretenberg/goblin/translation_evaluations.hpp" 
-#include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp" -#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" -#include "barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp" -#include "barretenberg/translator_vm/goblin_translator_composer.hpp" -#include "barretenberg/ultra_honk/ultra_composer.hpp" - -#include - -using namespace proof_system::honk; - -namespace test_full_goblin_composer { - -namespace { -auto& engine = numeric::random::get_debug_engine(); -} - -class FullGoblinComposerTests : public ::testing::Test { - protected: - static void SetUpTestSuite() - { - barretenberg::srs::init_crs_factory("../srs_db/ignition"); - barretenberg::srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); - } - - using Curve = curve::BN254; - using FF = Curve::ScalarField; - using Fbase = Curve::BaseField; - using Point = Curve::AffineElement; - using CommitmentKey = pcs::CommitmentKey; - using OpQueue = proof_system::ECCOpQueue; - using ECCVMFlavor = flavor::ECCVM; - using ECCVMBuilder = proof_system::ECCVMCircuitBuilder; - using ECCVMComposer = ECCVMComposer_; - - static constexpr size_t NUM_OP_QUEUE_COLUMNS = flavor::GoblinUltra::NUM_WIRES; - - /** - * @brief Generate a simple test circuit with some ECC op gates and conventional arithmetic gates - * - * @param builder - */ - static void generate_test_circuit(proof_system::GoblinUltraCircuitBuilder& builder) - { - // Add some arbitrary ecc op gates - for (size_t i = 0; i < 3; ++i) { - auto point = Point::random_element(); - auto scalar = FF::random_element(); - builder.queue_ecc_add_accum(point); - builder.queue_ecc_mul_accum(point, scalar); - } - // queues the result of the preceding ECC - builder.queue_ecc_eq(); // should be eq and reset - - // Add some conventional gates that utilize public inputs - for (size_t i = 0; i < 10; ++i) { - FF a = FF::random_element(); - FF b = FF::random_element(); - FF c = FF::random_element(); - FF d = a + b + c; - uint32_t a_idx = builder.add_public_variable(a); - uint32_t b_idx = builder.add_variable(b); - uint32_t c_idx = builder.add_variable(c); - uint32_t d_idx = builder.add_variable(d); - - builder.create_big_add_gate({ a_idx, b_idx, c_idx, d_idx, FF(1), FF(1), FF(1), FF(-1), FF(0) }); - } - } - - /** - * @brief Mock the interactions of a simple curcuit with the op_queue - * @details The transcript aggregation protocol in the Goblin proof system can not yet support an empty "previous - * transcript" (see issue #723). This function mocks the interactions with the op queue of a fictional "first" - * circuit. This way, when we go to generate a proof over our first "real" circuit, the transcript aggregation - * protocol can proceed nominally. The mock data is valid in the sense that it can be processed by all stages of - * Goblin as if it came from a genuine circuit. 
- * - * @todo WOKTODO: this is a zero commitments issue - * - * @param op_queue - */ - static void perform_op_queue_interactions_for_mock_first_circuit( - std::shared_ptr& op_queue) - { - proof_system::GoblinUltraCircuitBuilder builder{ op_queue }; - - // Add a mul accum op and an equality op - auto point = Point::one() * FF::random_element(); - auto scalar = FF::random_element(); - builder.queue_ecc_mul_accum(point, scalar); - builder.queue_ecc_eq(); - - op_queue->set_size_data(); - - // Manually compute the op queue transcript commitments (which would normally be done by the prover) - auto crs_factory_ = barretenberg::srs::get_crs_factory(); - auto commitment_key = CommitmentKey(op_queue->get_current_size(), crs_factory_); - std::array op_queue_commitments; - size_t idx = 0; - for (auto& entry : op_queue->get_aggregate_transcript()) { - op_queue_commitments[idx++] = commitment_key.commit(entry); - } - // Store the commitment data for use by the prover of the next circuit - op_queue->set_commitment_data(op_queue_commitments); - } - - /** - * @brief Construct and a verify a Honk proof - * - */ - static bool construct_and_verify_honk_proof(GoblinUltraComposer& composer, - proof_system::GoblinUltraCircuitBuilder& builder) - { - auto instance = composer.create_instance(builder); - auto prover = composer.create_prover(instance); - auto verifier = composer.create_verifier(instance); - auto proof = prover.construct_proof(); - bool verified = verifier.verify_proof(proof); - - return verified; - } - - /** - * @brief Construct and verify a Goblin ECC op queue merge proof - * - */ - static bool construct_and_verify_merge_proof(GoblinUltraComposer& composer, std::shared_ptr& op_queue) - { - auto merge_prover = composer.create_merge_prover(op_queue); - auto merge_verifier = composer.create_merge_verifier(/*srs_size=*/10); - auto merge_proof = merge_prover.construct_proof(); - bool verified = merge_verifier.verify_proof(merge_proof); - - return verified; - } -}; - -/** - * @brief Test proof construction/verification for a circuit with ECC op gates, public inputs, and basic arithmetic - * gates - * @note We simulate op queue interactions with a previous circuit so the actual circuit under test utilizes an op queue - * with non-empty 'previous' data. This avoids complications with zero-commitments etc. 
- * - */ -TEST_F(FullGoblinComposerTests, SimpleCircuit) -{ - auto op_queue = std::make_shared(); - - // Add mock data to op queue to simulate interaction with a "first" circuit - perform_op_queue_interactions_for_mock_first_circuit(op_queue); - - // Construct a series of simple Goblin circuits; generate and verify their proofs - size_t NUM_CIRCUITS = 3; - for (size_t circuit_idx = 0; circuit_idx < NUM_CIRCUITS; ++circuit_idx) { - proof_system::GoblinUltraCircuitBuilder builder{ op_queue }; - - generate_test_circuit(builder); - - // The same composer is used to manage Honk and Merge prover/verifier - proof_system::honk::GoblinUltraComposer composer; - - // Construct and verify Ultra Goblin Honk proof - bool honk_verified = construct_and_verify_honk_proof(composer, builder); - EXPECT_TRUE(honk_verified); - - // Construct and verify op queue merge proof - bool merge_verified = construct_and_verify_merge_proof(composer, op_queue); - EXPECT_TRUE(merge_verified); - } - - // Execute the ECCVM - // TODO(https://github.com/AztecProtocol/barretenberg/issues/785) Properly initialize transcript - auto eccvm_builder = ECCVMBuilder(op_queue); - auto eccvm_composer = ECCVMComposer(); - auto eccvm_prover = eccvm_composer.create_prover(eccvm_builder); - auto eccvm_verifier = eccvm_composer.create_verifier(eccvm_builder); - auto eccvm_proof = eccvm_prover.construct_proof(); - bool eccvm_verified = eccvm_verifier.verify_proof(eccvm_proof); - EXPECT_TRUE(eccvm_verified); - - // Execute the Translator - // TODO(https://github.com/AztecProtocol/barretenberg/issues/786) Properly derive batching_challenge - auto batching_challenge = Fbase::random_element(); - auto evaluation_input = eccvm_prover.evaluation_challenge_x; - proof_system::GoblinTranslatorCircuitBuilder translator_builder{ batching_challenge, evaluation_input, op_queue }; - GoblinTranslatorComposer translator_composer; - GoblinTranslatorProver translator_prover = translator_composer.create_prover(translator_builder); - GoblinTranslatorVerifier translator_verifier = translator_composer.create_verifier(translator_builder); - proof_system::plonk::proof translator_proof = translator_prover.construct_proof(); - bool accumulator_construction_verified = translator_verifier.verify_proof(translator_proof); - bool translation_verified = translator_verifier.verify_translation(eccvm_prover.translation_evaluations); - EXPECT_TRUE(accumulator_construction_verified && translation_verified); -} -// TODO(https://github.com/AztecProtocol/barretenberg/issues/787) Expand these tests. 
-} // namespace test_full_goblin_composer diff --git a/barretenberg/cpp/src/barretenberg/goblin/full_goblin_recursion.test.cpp b/barretenberg/cpp/src/barretenberg/goblin/full_goblin_recursion.test.cpp new file mode 100644 index 00000000000..2adc5049600 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/goblin/full_goblin_recursion.test.cpp @@ -0,0 +1,100 @@ +#include "barretenberg/eccvm/eccvm_composer.hpp" +#include "barretenberg/goblin/goblin.hpp" +#include "barretenberg/goblin/mock_circuits.hpp" +#include "barretenberg/goblin/translation_evaluations.hpp" +#include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp" +#include "barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp" +#include "barretenberg/translator_vm/goblin_translator_composer.hpp" +#include "barretenberg/ultra_honk/ultra_composer.hpp" + +#include + +using namespace proof_system::honk; +namespace goblin_recursion_tests { + +class GoblinRecursionTests : public ::testing::Test { + protected: + static void SetUpTestSuite() + { + barretenberg::srs::init_crs_factory("../srs_db/ignition"); + barretenberg::srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); + } + + using Curve = curve::BN254; + using FF = Curve::ScalarField; + using GoblinUltraBuilder = proof_system::GoblinUltraCircuitBuilder; + using ECCVMFlavor = flavor::ECCVM; + using ECCVMBuilder = proof_system::ECCVMCircuitBuilder; + using ECCVMComposer = ECCVMComposer_; + using TranslatorFlavor = flavor::GoblinTranslator; + using TranslatorBuilder = proof_system::GoblinTranslatorCircuitBuilder; + using TranslatorComposer = GoblinTranslatorComposer; + using TranslatorConsistencyData = barretenberg::TranslationEvaluations; + using RecursiveFlavor = flavor::GoblinUltraRecursive_; + using RecursiveVerifier = proof_system::plonk::stdlib::recursion::honk::UltraRecursiveVerifier_; + using Goblin = barretenberg::Goblin; + using KernelInput = Goblin::AccumulationOutput; + using UltraVerifier = UltraVerifier_; + + /** + * @brief Construct a mock kernel circuit + * @details This circuit contains (1) some basic/arbitrary arithmetic gates, (2) a genuine recursive verification of + * the proof provided as input. It does not contain any other real kernel logic. + * + * @param builder + * @param kernel_input A proof to be recursively verified and the corresponding native verification key + */ + static void construct_mock_kernel_circuit(GoblinUltraBuilder& builder, KernelInput& kernel_input) + { + // Generic operations e.g. state updates (just arith gates for now) + GoblinTestingUtils::construct_arithmetic_circuit(builder); + + // Execute recursive aggregation of previous kernel proof + RecursiveVerifier verifier{ &builder, kernel_input.verification_key }; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/801): Aggregation + auto pairing_points = verifier.verify_proof(kernel_input.proof); // previous kernel proof + // TODO(https://github.com/AztecProtocol/barretenberg/issues/803): Mock app circuit. In the absence of a mocked + // app circuit proof, we simply perform another recursive verification for the previous kernel proof to + // approximate the work done for the app proof. 
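+ // (Note: the reassignment below simply overwrites the first set of pairing points rather
+ // than aggregating them; genuine aggregation is the subject of the TODO above, issue 801.)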
+ pairing_points = verifier.verify_proof(kernel_input.proof);
+ }
+};
+
+/**
+ * @brief A full Goblin test that mimics the basic aztec client architecture
+ *
+ */
+TEST_F(GoblinRecursionTests, Pseudo)
+{
+ Goblin goblin;
+
+ // Construct an initial circuit; its proof will be recursively verified by the first kernel
+ GoblinUltraBuilder initial_circuit{ goblin.op_queue };
+ GoblinTestingUtils::construct_simple_initial_circuit(initial_circuit);
+
+ KernelInput kernel_input = goblin.accumulate(initial_circuit);
+
+ // Construct a series of simple Goblin circuits; generate and verify their proofs
+ size_t NUM_CIRCUITS = 2;
+ for (size_t circuit_idx = 0; circuit_idx < NUM_CIRCUITS; ++circuit_idx) {
+ // Construct a circuit with logic resembling that of the "kernel circuit"
+ GoblinUltraBuilder circuit_builder{ goblin.op_queue };
+ construct_mock_kernel_circuit(circuit_builder, kernel_input);
+
+ // Construct proof of the current kernel circuit to be recursively verified by the next one
+ kernel_input = goblin.accumulate(circuit_builder);
+ }
+
+ Goblin::Proof proof = goblin.prove();
+ // Verify the final ultra proof
+ UltraVerifier ultra_verifier{ kernel_input.verification_key };
+ bool ultra_verified = ultra_verifier.verify_proof(kernel_input.proof);
+ // Verify the goblin proof (eccvm, translator, merge)
+ bool verified = goblin.verify(proof);
+ EXPECT_TRUE(ultra_verified && verified);
+}
+
+// TODO(https://github.com/AztecProtocol/barretenberg/issues/787) Expand these tests.
+} // namespace goblin_recursion_tests
diff --git a/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp
new file mode 100644
index 00000000000..3c2588288df
--- /dev/null
+++ b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp
@@ -0,0 +1,133 @@
+#pragma once
+
+#include "barretenberg/eccvm/eccvm_composer.hpp"
+#include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp"
+#include "barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp"
+#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp"
+#include "barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.hpp"
+#include "barretenberg/translator_vm/goblin_translator_composer.hpp"
+#include "barretenberg/ultra_honk/ultra_composer.hpp"
+
+namespace barretenberg {
+
+class Goblin {
+ using HonkProof = proof_system::plonk::proof;
+
+ public:
+ /**
+ * @brief Output of goblin::accumulate; an Ultra proof and the corresponding verification key
+ *
+ */
+ struct AccumulationOutput {
+ using NativeVerificationKey = proof_system::honk::flavor::GoblinUltra::VerificationKey;
+ HonkProof proof;
+ std::shared_ptr verification_key;
+ };
+
+ struct Proof {
+ HonkProof merge_proof;
+ HonkProof eccvm_proof;
+ HonkProof translator_proof;
+ TranslationEvaluations translation_evaluations;
+ };
+
+ using Fr = barretenberg::fr;
+ using Fq = barretenberg::fq;
+
+ using Transcript = proof_system::honk::BaseTranscript;
+ using GoblinUltraComposer = proof_system::honk::GoblinUltraComposer;
+ using GoblinUltraCircuitBuilder = proof_system::GoblinUltraCircuitBuilder;
+ using OpQueue = proof_system::ECCOpQueue;
+ using ECCVMFlavor = proof_system::honk::flavor::ECCVM;
+ using ECCVMBuilder = proof_system::ECCVMCircuitBuilder;
+ using ECCVMComposer = proof_system::honk::ECCVMComposer;
+ using TranslatorBuilder = proof_system::GoblinTranslatorCircuitBuilder;
+ using TranslatorComposer = proof_system::honk::GoblinTranslatorComposer;
+ using
RecursiveMergeVerifier = + proof_system::plonk::stdlib::recursion::goblin::MergeRecursiveVerifier_; + using MergeVerifier = proof_system::honk::MergeVerifier_; + + std::shared_ptr op_queue = std::make_shared(); + + HonkProof merge_proof; + + // On the first call to accumulate there is no merge proof to verify + bool merge_proof_exists{ false }; + + private: + // TODO(https://github.com/AztecProtocol/barretenberg/issues/798) unique_ptr use is a hack + std::unique_ptr eccvm_builder; + std::unique_ptr translator_builder; + std::unique_ptr eccvm_composer; + std::unique_ptr translator_composer; + + public: + /** + * @brief Construct an Ultra proof for the given circuit and update the merge proof that will be recursively + * verified on the next call to accumulate + * + * @param circuit_builder + */ + AccumulationOutput accumulate(GoblinUltraCircuitBuilder& circuit_builder) + { + // Complete the circuit logic by recursively verifying previous merge proof if it exists + if (merge_proof_exists) { + RecursiveMergeVerifier merge_verifier{ &circuit_builder }; + [[maybe_unused]] auto pairing_points = merge_verifier.verify_proof(merge_proof); + } + + // Construct a Honk proof for the main circuit + GoblinUltraComposer composer; + auto instance = composer.create_instance(circuit_builder); + auto prover = composer.create_prover(instance); + auto ultra_proof = prover.construct_proof(); + + // Construct and store the merge proof to be recursively verified on the next call to accumulate + auto merge_prover = composer.create_merge_prover(op_queue); + merge_proof = merge_prover.construct_proof(); + + if (!merge_proof_exists) { + merge_proof_exists = true; + } + + return { ultra_proof, instance->verification_key }; + }; + + Proof prove() + { + Proof proof; + + proof.merge_proof = std::move(merge_proof); + + eccvm_builder = std::make_unique(op_queue); + eccvm_composer = std::make_unique(); + auto eccvm_prover = eccvm_composer->create_prover(*eccvm_builder); + proof.eccvm_proof = eccvm_prover.construct_proof(); + proof.translation_evaluations = eccvm_prover.translation_evaluations; + + translator_builder = std::make_unique( + eccvm_prover.translation_batching_challenge_v, eccvm_prover.evaluation_challenge_x, op_queue); + translator_composer = std::make_unique(); + auto translator_prover = translator_composer->create_prover(*translator_builder, eccvm_prover.transcript); + proof.translator_proof = translator_prover.construct_proof(); + + return proof; + }; + + bool verify(const Proof& proof) + { + MergeVerifier merge_verifier; + bool merge_verified = merge_verifier.verify_proof(proof.merge_proof); + + auto eccvm_verifier = eccvm_composer->create_verifier(*eccvm_builder); + bool eccvm_verified = eccvm_verifier.verify_proof(proof.eccvm_proof); + + auto translator_verifier = translator_composer->create_verifier(*translator_builder, eccvm_verifier.transcript); + bool accumulator_construction_verified = translator_verifier.verify_proof(proof.translator_proof); + // TODO(https://github.com/AztecProtocol/barretenberg/issues/799): + // Ensure translation_evaluations are passed correctly + bool translation_verified = translator_verifier.verify_translation(proof.translation_evaluations); + + return merge_verified && eccvm_verified && accumulator_construction_verified && translation_verified; + }; +}; +} // namespace barretenberg \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp new file mode 100644 index 00000000000..5199a65ab63 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp @@ -0,0 +1,96 @@ +#include
"barretenberg/commitment_schemes/commitment_key.hpp" +#include "barretenberg/flavor/goblin_ultra.hpp" +#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" +#include "barretenberg/srs/global_crs.hpp" + +namespace barretenberg { +class GoblinTestingUtils { + public: + using Curve = curve::BN254; + using FF = Curve::ScalarField; + using Fbase = Curve::BaseField; + using Point = Curve::AffineElement; + using CommitmentKey = proof_system::honk::pcs::CommitmentKey; + using OpQueue = proof_system::ECCOpQueue; + using GoblinUltraBuilder = proof_system::GoblinUltraCircuitBuilder; + using Flavor = proof_system::honk::flavor::GoblinUltra; + static constexpr size_t NUM_OP_QUEUE_COLUMNS = Flavor::NUM_WIRES; + + static void construct_arithmetic_circuit(GoblinUltraBuilder& builder) + { + // Add some arithmetic gates that utilize public inputs + for (size_t i = 0; i < 10; ++i) { + FF a = FF::random_element(); + FF b = FF::random_element(); + FF c = FF::random_element(); + FF d = a + b + c; + uint32_t a_idx = builder.add_public_variable(a); + uint32_t b_idx = builder.add_variable(b); + uint32_t c_idx = builder.add_variable(c); + uint32_t d_idx = builder.add_variable(d); + + builder.create_big_add_gate({ a_idx, b_idx, c_idx, d_idx, FF(1), FF(1), FF(1), FF(-1), FF(0) }); + } + } + + /** + * @brief Mock the interactions of a simple curcuit with the op_queue + * @todo The transcript aggregation protocol in the Goblin proof system can not yet support an empty "previous + * transcript" (see issue #723) because the corresponding commitments are zero / the point at infinity. This + * function mocks the interactions with the op queue of a fictional "first" circuit. This way, when we go to + * generate a proof over our first "real" circuit, the transcript aggregation protocol can proceed nominally. The + * mock data is valid in the sense that it can be processed by all stages of Goblin as if it came from a genuine + * circuit. 
+ * + * + * @param op_queue + */ + static void perform_op_queue_interactions_for_mock_first_circuit( + std::shared_ptr& op_queue) + { + proof_system::GoblinUltraCircuitBuilder builder{ op_queue }; + + // Add a mul accum op and an equality op + auto point = Point::one() * FF::random_element(); + auto scalar = FF::random_element(); + builder.queue_ecc_mul_accum(point, scalar); + builder.queue_ecc_eq(); + + op_queue->set_size_data(); + + // Manually compute the op queue transcript commitments (which would normally be done by the merge prover) + auto crs_factory_ = barretenberg::srs::get_crs_factory(); + auto commitment_key = CommitmentKey(op_queue->get_current_size(), crs_factory_); + std::array op_queue_commitments; + size_t idx = 0; + for (auto& entry : op_queue->get_aggregate_transcript()) { + op_queue_commitments[idx++] = commitment_key.commit(entry); + } + // Store the commitment data for use by the prover of the next circuit + op_queue->set_commitment_data(op_queue_commitments); + } + + /** + * @brief Generate a simple test circuit with some ECC op gates and conventional arithmetic gates + * + * @param builder + */ + static void construct_simple_initial_circuit(GoblinUltraBuilder& builder) + { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/800) Testing cleanup + perform_op_queue_interactions_for_mock_first_circuit(builder.op_queue); + + // Add some arbitrary ecc op gates + for (size_t i = 0; i < 3; ++i) { + auto point = Point::random_element(); + auto scalar = FF::random_element(); + builder.queue_ecc_add_accum(point); + builder.queue_ecc_mul_accum(point, scalar); + } + // Queue an equality check on the result of the preceding ECC ops + builder.queue_ecc_eq(); // should be eq and reset + + construct_arithmetic_circuit(builder); + } +}; +} // namespace barretenberg \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/lookup_library.hpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/logderivative_library.hpp similarity index 58% rename from barretenberg/cpp/src/barretenberg/honk/proof_system/lookup_library.hpp rename to barretenberg/cpp/src/barretenberg/honk/proof_system/logderivative_library.hpp index 820bc2907a5..4a86cf74075 100644 --- a/barretenberg/cpp/src/barretenberg/honk/proof_system/lookup_library.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/logderivative_library.hpp @@ -1,7 +1,7 @@ #pragma once #include -namespace proof_system::honk::lookup_library { +namespace proof_system::honk::logderivative_library { /** * @brief Compute the inverse polynomial I(X) required for logderivative lookups @@ -29,12 +29,12 @@ void compute_logderivative_inverse(Polynomials& polynomials, auto& relation_para using Accumulator = typename Relation::ValueAccumulator0; constexpr size_t READ_TERMS = Relation::READ_TERMS; constexpr size_t WRITE_TERMS = Relation::WRITE_TERMS; - auto& inverse_polynomial = polynomials.lookup_inverses; auto lookup_relation = Relation(); + auto& inverse_polynomial = lookup_relation.template get_inverse_polynomial(polynomials); for (size_t i = 0; i < circuit_size; ++i) { auto row = polynomials.get_row(i); - bool has_inverse = lookup_relation.lookup_exists_at_row(row); + bool has_inverse = lookup_relation.operation_exists_at_row(row); if (!has_inverse) { continue; } @@ -97,7 +97,7 @@ void accumulate_logderivative_lookup_subrelation_contributions(ContainerOverSubr using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; - auto lookup_inverses =
View(in.lookup_inverses); + auto lookup_inverses = View(lookup_relation.template get_inverse_polynomial(in)); constexpr size_t NUM_TOTAL_TERMS = READ_TERMS + WRITE_TERMS; std::array lookup_terms; @@ -153,4 +153,98 @@ void accumulate_logderivative_lookup_subrelation_contributions(ContainerOverSubr }); } -} // namespace proof_system::honk::lookup_library \ No newline at end of file +/** + * @brief Compute generic log-derivative set permutation subrelation accumulation + * @details The generic log-derivative permutation relation consists of two subrelations. The first demonstrates that + * the inverse polynomial I, defined via I = 1/[(read_term) * (write_term)], has been computed correctly. The second + * establishes the correctness of the permutation itself based on the log-derivative argument. Note that the + * latter subrelation is "linearly dependent" in the sense that it establishes that a sum across all rows of the + * execution trace is zero, rather than that some expression holds independently at each row. Accordingly, this + * subrelation is not multiplied by a scaling factor at each accumulation step. The subrelation expressions are + * respectively: + * + * I * (read_term) * (write_term) - q_{permutation_enabler} = 0 + * + * \sum_{i=0}^{n-1} [q_{read_enabler} * I * write_term - q_{write_enabler} * I * read_term] = 0 + * + * The explicit expressions for read_term and write_term are dependent upon the particular structure of the permutation + * being performed and methods for computing them must be defined in the corresponding relation class. The entities + * which are used to determine the use of permutation (is it enabled, is the first "read" set enabled, is the second + * "write" set enabled) must be defined in the relation class. + * + * @tparam FF + * @tparam Relation + * @tparam ContainerOverSubrelations + * @tparam AllEntities + * @tparam Parameters + * @param accumulator + * @param in + * @param params + * @param scaling_factor + */ +template +void accumulate_logderivative_permutation_subrelation_contributions(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor) +{ + constexpr size_t READ_TERMS = Relation::READ_TERMS; + constexpr size_t WRITE_TERMS = Relation::WRITE_TERMS; + + // For now we only do simple permutations over tuples with 1 read and 1 write term + static_assert(READ_TERMS == 1); + static_assert(WRITE_TERMS == 1); + + auto permutation_relation = Relation(); + + using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; + using View = typename Accumulator::View; + + auto permutation_inverses = View(permutation_relation.template get_inverse_polynomial(in)); + + constexpr size_t NUM_TOTAL_TERMS = 2; + std::array permutation_terms; + std::array denominator_accumulator; + + // The permutation relation = 1 / read_term - 1 / write_term + // To get the inverses (1 / read_term), (1 / write_term), we have a commitment to the product of inverses + // i.e. permutation_inverses = (1 / read_term) * (1 / write_term) + // The purpose of this next section is to derive individual inverse terms using `permutation_inverses` + // i.e.
(1 / read_term) = permutation_inverses * write_term + // (1 / write_term) = permutation_inverses * read_term + permutation_terms[0] = permutation_relation.template compute_read_term(in, params); + permutation_terms[1] = permutation_relation.template compute_write_term(in, params); + + barretenberg::constexpr_for<0, NUM_TOTAL_TERMS, 1>( + [&]<size_t i>() { denominator_accumulator[i] = permutation_terms[i]; }); + + barretenberg::constexpr_for<0, NUM_TOTAL_TERMS - 1, 1>( + [&]<size_t i>() { denominator_accumulator[i + 1] *= denominator_accumulator[i]; }); + + auto inverse_accumulator = Accumulator(permutation_inverses); // denominator_accumulator[NUM_TOTAL_TERMS - 1]; + + const auto inverse_exists = permutation_relation.template compute_inverse_exists(in); + + // Note: the lookup_inverses are computed so that the value is 0 if !inverse_exists + std::get<0>(accumulator) += + (denominator_accumulator[NUM_TOTAL_TERMS - 1] * permutation_inverses - inverse_exists) * scaling_factor; + + // After this algorithm, total degree of denominator_accumulator = NUM_TOTAL_TERMS + for (size_t i = 0; i < NUM_TOTAL_TERMS - 1; ++i) { + denominator_accumulator[NUM_TOTAL_TERMS - 1 - i] = + denominator_accumulator[NUM_TOTAL_TERMS - 2 - i] * inverse_accumulator; + inverse_accumulator = inverse_accumulator * permutation_terms[NUM_TOTAL_TERMS - 1 - i]; + } + denominator_accumulator[0] = inverse_accumulator; + + // each predicate is degree-1 + // degree of relation at this point = NUM_TOTAL_TERMS + 1 + std::get<1>(accumulator) += + permutation_relation.template compute_read_term_predicate(in) * denominator_accumulator[0]; + + // each predicate is degree-1 + // degree of relation = NUM_TOTAL_TERMS + 1 + std::get<1>(accumulator) -= + permutation_relation.template compute_write_term_predicate(in) * denominator_accumulator[1]; +} +} // namespace proof_system::honk::logderivative_library \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp b/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp index 37c056609ee..dda23952741 100644 --- a/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/utils/testing.hpp @@ -9,18 +9,17 @@ namespace proof_system::honk { * function returns an array of data pointed to by the ProverPolynomials.
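 * A usage sketch mirroring the unit test in testing.test.cpp below (assumed shapes; Flavor is any Honk flavor):
 * @code
 *   auto [storage, prover_polynomials] =
 *       get_sequential_prover_polynomials<Flavor>(2, 0); // log_circuit_size = 2, starting_value = 0
 *   // storage.get_all()[0] holds {0, 1, 2, 3}; prover_polynomials is a set of views over storage
 * @endcode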
*/ template -std::pair, Flavor::NUM_ALL_ENTITIES>, - typename Flavor::ProverPolynomials> -get_sequential_prover_polynomials(const size_t log_circuit_size, const size_t starting_value) +std::pair get_sequential_prover_polynomials( + const size_t log_circuit_size, const size_t starting_value) { using FF = typename Flavor::FF; using ProverPolynomials = typename Flavor::ProverPolynomials; using Polynomial = typename Flavor::Polynomial; - std::array, Flavor::NUM_ALL_ENTITIES> storage; + typename Flavor::AllPolynomials storage; size_t circuit_size = 1 << log_circuit_size; size_t value_idx = starting_value; - for (auto& polynomial : storage) { + for (auto& polynomial : storage.get_all()) { polynomial = Polynomial(circuit_size); for (auto& value : polynomial) { value = FF(value_idx++); @@ -28,7 +27,7 @@ get_sequential_prover_polynomials(const size_t log_circuit_size, const size_t st } ProverPolynomials prover_polynomials; - for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage)) { + for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage.get_all())) { prover_poly = storage_poly; } @@ -36,17 +35,16 @@ get_sequential_prover_polynomials(const size_t log_circuit_size, const size_t st } template -std::pair, Flavor::NUM_ALL_ENTITIES>, - typename Flavor::ProverPolynomials> -get_zero_prover_polynomials(const size_t log_circuit_size) +std::pair get_zero_prover_polynomials( + const size_t log_circuit_size) { using FF = typename Flavor::FF; using ProverPolynomials = typename Flavor::ProverPolynomials; using Polynomial = typename Flavor::Polynomial; - std::array, Flavor::NUM_ALL_ENTITIES> storage; + typename Flavor::AllPolynomials storage; size_t circuit_size = 1 << log_circuit_size; - for (auto& polynomial : storage) { + for (auto& polynomial : storage.get_all()) { polynomial = Polynomial(circuit_size); for (auto& value : polynomial) { value = FF(0); @@ -54,7 +52,7 @@ get_zero_prover_polynomials(const size_t log_circuit_size) } ProverPolynomials prover_polynomials; - for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage)) { + for (auto [prover_poly, storage_poly] : zip_view(prover_polynomials.get_all(), storage.get_all())) { prover_poly = storage_poly; } diff --git a/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp b/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp index c4b5fda266c..3e150e4a2d5 100644 --- a/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp +++ b/barretenberg/cpp/src/barretenberg/honk/utils/testing.test.cpp @@ -10,8 +10,8 @@ TEST(HonkTestingUtils, ProverPolynomials) auto [storage, prover_polynomials] = proof_system::honk::get_sequential_prover_polynomials(/*log_circuit_size=*/2, /*starting_value=*/0); auto& first_polynomial = prover_polynomials.get_all()[0]; - EXPECT_EQ(storage[0][0], first_polynomial[0]); - EXPECT_EQ(storage[0][1], first_polynomial[1]); + EXPECT_EQ(storage.get_all()[0][0], first_polynomial[0]); + EXPECT_EQ(storage.get_all()[0][1], first_polynomial[1]); }; } // namespace barretenberg::test_testing_utils diff --git a/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp b/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp index 3dc320abe2d..e229f9b7d23 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp +++ b/barretenberg/cpp/src/barretenberg/numeric/uint128/uint128.hpp @@ -5,6 +5,7 @@ #ifdef __i386__ #include "barretenberg/common/serialize.hpp" +#include namespace numeric { @@ -37,7 +38,7 @@ class 
alignas(32) uint128_t { constexpr ~uint128_t() = default; explicit constexpr operator bool() const { return static_cast(data[0]); }; - template explicit constexpr operator T() const { return static_cast(data[0]); }; + template explicit constexpr operator T() const { return static_cast(data[0]); }; [[nodiscard]] constexpr bool get_bit(uint64_t bit_index) const; [[nodiscard]] constexpr uint64_t get_msb() const; diff --git a/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp b/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp index d8cd9ef2f18..5ddaa9713ae 100644 --- a/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp +++ b/barretenberg/cpp/src/barretenberg/numeric/uint256/uint256.hpp @@ -14,6 +14,7 @@ #include "../uint128/uint128.hpp" #include "barretenberg/common/serialize.hpp" #include "barretenberg/common/throw_or_abort.hpp" +#include #include #include #include @@ -91,7 +92,7 @@ class alignas(32) uint256_t { explicit constexpr operator bool() const { return static_cast(data[0]); }; - template explicit constexpr operator T() const { return static_cast(data[0]); }; + template explicit constexpr operator T() const { return static_cast(data[0]); }; [[nodiscard]] constexpr bool get_bit(uint64_t bit_index) const; [[nodiscard]] constexpr uint64_t get_msb() const; diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp index 4abbd5bc91f..bdcbd4fa4ad 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp @@ -7,7 +7,7 @@ #include "barretenberg/ecc/curves/bn254/fr.hpp" #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" #include "barretenberg/flavor/ecc_vm.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/proof_system/op_queue/ecc_op_queue.hpp" #include "barretenberg/relations/relation_parameters.hpp" @@ -505,9 +505,9 @@ template class ECCVMCircuitBuilder { auto polynomials = compute_polynomials(); const size_t num_rows = polynomials.get_polynomial_size(); - proof_system::honk::lookup_library::compute_logderivative_inverse>( - polynomials, params, num_rows); + proof_system::honk::logderivative_library:: + compute_logderivative_inverse>( + polynomials, params, num_rows); honk::permutation_library::compute_permutation_grand_product>( num_rows, polynomials, params); diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp deleted file mode 100644 index a233692ebf7..00000000000 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp +++ /dev/null @@ -1,99 +0,0 @@ - - -// AUTOGENERATED FILE -#pragma once - -#include "barretenberg/common/throw_or_abort.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/proof_system/circuit_builder/circuit_builder_base.hpp" - -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/relations/generated/Fib.hpp" - -using namespace barretenberg; - -namespace proof_system { - -class FibCircuitBuilder { - public: - using 
Flavor = proof_system::honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Row = Fib_vm::Row; - - // TODO: template - using Polynomial = Flavor::Polynomial; - using AllPolynomials = Flavor::AllPolynomials; - - static constexpr size_t num_fixed_columns = 6; - static constexpr size_t num_polys = 4; - std::vector rows; - - void set_trace(std::vector&& trace) { rows = std::move(trace); } - - AllPolynomials compute_polynomials() - { - const auto num_rows = get_circuit_subgroup_size(); - AllPolynomials polys; - - // Allocate mem for each column - for (auto& poly : polys.get_all()) { - poly = Polynomial(num_rows); - } - - for (size_t i = 0; i < rows.size(); i++) { - polys.Fibonacci_LAST[i] = rows[i].Fibonacci_LAST; - polys.Fibonacci_FIRST[i] = rows[i].Fibonacci_FIRST; - polys.Fibonacci_x[i] = rows[i].Fibonacci_x; - polys.Fibonacci_y[i] = rows[i].Fibonacci_y; - } - - polys.Fibonacci_x_shift = Polynomial(polys.Fibonacci_x.shifted()); - polys.Fibonacci_y_shift = Polynomial(polys.Fibonacci_y.shifted()); - - return polys; - } - - [[maybe_unused]] bool check_circuit() - { - auto polys = compute_polynomials(); - const size_t num_rows = polys.get_polynomial_size(); - - const auto evaluate_relation = [&](const std::string& relation_name) { - typename Relation::SumcheckArrayOfValuesOverSubrelations result; - for (auto& r : result) { - r = 0; - } - constexpr size_t NUM_SUBRELATIONS = result.size(); - - for (size_t i = 0; i < num_rows; ++i) { - Relation::accumulate(result, polys.get_row(i), {}, 1); - - bool x = true; - for (size_t j = 0; j < NUM_SUBRELATIONS; ++j) { - if (result[j] != 0) { - throw_or_abort( - format("Relation ", relation_name, ", subrelation index ", j, " failed at row ", i)); - x = false; - } - } - if (!x) { - return false; - } - } - return true; - }; - - return evaluate_relation.template operator()>("Fib"); - } - - [[nodiscard]] size_t get_num_gates() const { return rows.size(); } - - [[nodiscard]] size_t get_circuit_subgroup_size() const - { - const size_t num_rows = get_num_gates(); - const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); - size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 
0 : 1)); - return num_rows_pow2; - } -}; -} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.cpp deleted file mode 100644 index d2d86ba4993..00000000000 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.cpp +++ /dev/null @@ -1,62 +0,0 @@ -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/proof_system/arithmetization/arithmetization.hpp" -#include -#include -#include -#include -#include -#include - -#include "./Fib_trace.hpp" - -#include "barretenberg/relations/generated/Fib.hpp" - -using namespace barretenberg; - -namespace proof_system { - -using Row = Fib_vm::Row; - -std::vector FibTraceBuilder::build_trace() -{ - { - std::vector trace; - // Build up the rows - size_t n = 16; - // Build the is_last column - - // Add first row that makes the shifted cols 0 - Row first_row = Row{ .Fibonacci_FIRST = 1 }; - trace.push_back(first_row); - - // The actual first row - Row row = { - .Fibonacci_x = 0, - .Fibonacci_y = 1, - }; - trace.push_back(row); - - for (size_t i = 2; i < n; i++) { - Row prev_row = trace[i - 1]; - - FF x = prev_row.Fibonacci_y; - FF y = prev_row.Fibonacci_x + prev_row.Fibonacci_y; - Row row = { - .Fibonacci_x = x, - .Fibonacci_y = y, - }; - trace.push_back(row); - } - // Build the isLast row - trace[n - 1].Fibonacci_LAST = 1; - - // Build the shifts - for (size_t i = 1; i < n; i++) { - Row& row = trace[i - 1]; - row.Fibonacci_x_shift = trace[(i) % trace.size()].Fibonacci_x; - row.Fibonacci_y_shift = trace[(i) % trace.size()].Fibonacci_y; - } - return trace; - } -} -} // namespace proof_system \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp deleted file mode 100644 index 856400d82b4..00000000000 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp +++ /dev/null @@ -1,22 +0,0 @@ -#pragma once - -#include "barretenberg/common/throw_or_abort.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/proof_system/circuit_builder/circuit_builder_base.hpp" - -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/relations/generated/Fib.hpp" - -using namespace barretenberg; - -namespace proof_system { - -class FibTraceBuilder { - public: - using Flavor = proof_system::honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Row = Fib_vm::Row; - - static std::vector build_trace(); -}; -} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp index 92164873e44..9527e5283d3 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp @@ -367,6 +367,7 @@ class GoblinTranslatorCircuitBuilder : public CircuitBuilderBase class ToyAVMCircuitBuilder { + public: + using FF = typename Flavor::FF; + using Polynomial = typename Flavor::Polynomial; + + static constexpr size_t NUM_POLYNOMIALS = Flavor::NUM_ALL_ENTITIES; + static constexpr size_t NUM_WIRES = Flavor::NUM_WIRES; + + using 
AllPolynomials = typename Flavor::AllPolynomials; + size_t num_gates = 0; + std::array, NUM_WIRES> wires; + ToyAVMCircuitBuilder() = default; + + void add_row(const std::array row) + { + for (size_t i = 0; i < NUM_WIRES; i++) { + wires[i].emplace_back(row[i]); + } + num_gates = wires[0].size(); + } + + /** + * @brief Compute the AVM Template flavor polynomial data required to generate a proof + * + * @return AllPolynomials + */ + AllPolynomials compute_polynomials() + { + + const auto num_gates_log2 = static_cast(numeric::get_msb64(num_gates)); + size_t num_gates_pow2 = 1UL << (num_gates_log2 + (1UL << num_gates_log2 == num_gates ? 0 : 1)); + + AllPolynomials polys; + for (auto& poly : polys.get_all()) { + poly = Polynomial(num_gates_pow2); + } + + polys.lagrange_first[0] = 1; + + for (size_t i = 0; i < num_gates; ++i) { + // Fill out the witness polynomials + polys.permutation_set_column_1[i] = wires[0][i]; + polys.permutation_set_column_2[i] = wires[1][i]; + polys.permutation_set_column_3[i] = wires[2][i]; + polys.permutation_set_column_4[i] = wires[3][i]; + polys.self_permutation_column[i] = wires[4][i]; + // By default the permutation is over all rows where we place data + polys.enable_tuple_set_permutation[i] = 1; + // The same column permutation alternates between even and odd values + polys.enable_single_column_permutation[i] = 1; + polys.enable_first_set_permutation[i] = i & 1; + polys.enable_second_set_permutation[i] = 1 - (i & 1); + } + return polys; + } + + /** + * @brief Check that the circuit is correct (proof should work) + * + */ + bool check_circuit() + { + // using FirstPermutationRelation = typename std::tuple_element_t<0, Flavor::Relations>; + // For now only gamma and beta are used + const FF gamma = FF::random_element(); + const FF beta = FF::random_element(); + proof_system::RelationParameters params{ + .eta = 0, + .beta = beta, + .gamma = gamma, + .public_input_delta = 0, + .lookup_grand_product_delta = 0, + .beta_sqr = 0, + .beta_cube = 0, + .eccvm_set_permutation_delta = 0, + }; + + // Compute polynomial values + auto polynomials = compute_polynomials(); + const size_t num_rows = polynomials.get_polynomial_size(); + + // Check the tuple permutation relation + proof_system::honk::logderivative_library::compute_logderivative_inverse< + Flavor, + honk::sumcheck::GenericPermutationRelation>( + polynomials, params, num_rows); + + using PermutationRelation = + honk::sumcheck::GenericPermutationRelation; + typename honk::sumcheck::GenericPermutationRelation::SumcheckArrayOfValuesOverSubrelations + permutation_result; + for (auto& r : permutation_result) { + r = 0; + } + for (size_t i = 0; i < num_rows; ++i) { + PermutationRelation::accumulate(permutation_result, polynomials.get_row(i), params, 1); + } + for (auto r : permutation_result) { + if (r != 0) { + info("Tuple GenericPermutationRelation failed."); + return false; + } + } + // Check the single permutation relation + proof_system::honk::logderivative_library::compute_logderivative_inverse< + Flavor, + honk::sumcheck::GenericPermutationRelation>( + polynomials, params, num_rows); + + using SameWirePermutationRelation = + honk::sumcheck::GenericPermutationRelation; + typename honk::sumcheck::GenericPermutationRelation::SumcheckArrayOfValuesOverSubrelations + second_permutation_result; + for (auto& r : second_permutation_result) { + r = 0; + } + for (size_t i = 0; i < num_rows; ++i) { + SameWirePermutationRelation::accumulate(second_permutation_result, polynomials.get_row(i), params, 1); + } + for (auto r : 
second_permutation_result) { + if (r != 0) { + info("Same wire GenericPermutationRelation failed."); + return false; + } + } + return true; + } + + [[nodiscard]] size_t get_num_gates() const { return num_gates; } + + [[nodiscard]] size_t get_circuit_subgroup_size(const size_t num_rows) const + { + + const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); + size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 0 : 1)); + return num_rows_pow2; + } +}; +} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.test.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.test.cpp new file mode 100644 index 00000000000..62b2e4d83c3 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.test.cpp @@ -0,0 +1,70 @@ +#include "toy_avm_circuit_builder.hpp" +#include "barretenberg/crypto/generators/generator_data.hpp" +#include + +using namespace barretenberg; + +namespace { +auto& engine = numeric::random::get_debug_engine(); +} + +namespace toy_avm_circuit_builder_tests { + +/** + * @brief A test explaining the work of the permutations in Toy AVM + * + */ +TEST(ToyAVMCircuitBuilder, BaseCase) +{ + + using FF = proof_system::honk::flavor::ToyAVM::FF; + const size_t circuit_size = 16; + proof_system::ToyAVMCircuitBuilder circuit_builder; + + // Sample 2*16 random elements for the tuple permutation example + std::vector column_0; + std::vector column_1; + for (size_t i = 0; i < circuit_size; i++) { + column_0.emplace_back(FF::random_element()); + column_1.emplace_back(FF::random_element()); + } + + // Sample 8 random elements for the single column permutation + std::vector column_2; + for (size_t i = 0; i < circuit_size / 2; i++) { + column_2.emplace_back(FF::random_element()); + } + + for (size_t i = 0; i < circuit_size; i++) { + // We put the same tuple of values in the first 2 wires and in the next 2 wires, at different rows + // We also put the same value in the self_permutation column in 2 consecutive rows + circuit_builder.add_row({ column_0[i], column_1[i], column_0[15 - i], column_1[15 - i], column_2[i / 2] }); + } + + // Test that permutations with correct values work + bool result = circuit_builder.check_circuit(); + EXPECT_EQ(result, true); + + // Store value temporarily + FF tmp = circuit_builder.wires[0][5]; + + // Replace one of the values in a tuple permutation column with a random one, breaking the permutation + circuit_builder.wires[0][5] = FF::random_element(); + + // Check that it fails + result = circuit_builder.check_circuit(); + EXPECT_EQ(result, false); + + // Restore value + circuit_builder.wires[0][5] = tmp; + + // Check circuit passes + result = circuit_builder.check_circuit(); + EXPECT_EQ(result, true); + + // Break single-column permutation + circuit_builder.wires[circuit_builder.wires.size() - 1][0] = FF::random_element(); + result = circuit_builder.check_circuit(); + EXPECT_EQ(result, false); +} +} // namespace toy_avm_circuit_builder_tests \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp index dce50d0220c..68f22195659 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner.test.cpp @@ -33,9 +33,11 @@ TEST(Protogalaxy, CombinerOn2Instances) }; auto
run_test = [&](bool is_random_input) { + // Combiner test on prover polynomials containing random values, restricted to only the standard arithmetic + // relation. if (is_random_input) { std::vector> instance_data(NUM_INSTANCES); - std::array, NUM_INSTANCES> storage_arrays; + std::array storage_arrays; ProtoGalaxyProver prover; std::vector pow_betas = { FF(1), FF(2) }; @@ -46,6 +48,7 @@ TEST(Protogalaxy, CombinerOn2Instances) restrict_to_standard_arithmetic_relation(prover_polynomials); storage_arrays[idx] = std::move(storage); instance->prover_polynomials = prover_polynomials; + instance->instance_size = 2; instance_data[idx] = instance; } @@ -70,7 +73,7 @@ TEST(Protogalaxy, CombinerOn2Instances) EXPECT_EQ(result, expected_result); } else { std::vector> instance_data(NUM_INSTANCES); - std::array, NUM_INSTANCES> storage_arrays; + std::array storage_arrays; ProtoGalaxyProver prover; std::vector pow_betas = { FF(1), FF(2) }; @@ -81,6 +84,7 @@ TEST(Protogalaxy, CombinerOn2Instances) restrict_to_standard_arithmetic_relation(prover_polynomials); storage_arrays[idx] = std::move(storage); instance->prover_polynomials = prover_polynomials; + instance->instance_size = 2; instance_data[idx] = instance; } @@ -162,7 +166,7 @@ TEST(Protogalaxy, CombinerOn4Instances) auto run_test = [&]() { std::vector> instance_data(NUM_INSTANCES); - std::array, NUM_INSTANCES> storage_arrays; + std::array storage_arrays; ProtoGalaxyProver prover; std::vector pow_betas = { FF(1), FF(2) }; @@ -172,6 +176,7 @@ TEST(Protogalaxy, CombinerOn4Instances) /*log_circuit_size=*/1); storage_arrays[idx] = std::move(storage); instance->prover_polynomials = prover_polynomials; + instance->instance_size = 2; instance_data[idx] = instance; } diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py index 906d1948847..ac701d41e95 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/combiner_example_gen.py @@ -103,7 +103,6 @@ def compute_first_example(): row.q_l, row.q_r, row.q_o, row.q_c) accumulator += zeta_pow * relation_value zeta_pow *= zeta - return accumulator diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp index 61118b8b6e1..171f9b38a78 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/folding_result.hpp @@ -1,39 +1,18 @@ #pragma once #include "barretenberg/flavor/flavor.hpp" #include "barretenberg/relations/relation_parameters.hpp" +#include "barretenberg/sumcheck/instance/prover_instance.hpp" namespace proof_system::honk { -template struct ProverFoldingResult { - public: - using ProverPolynomials = typename Flavor::ProverPolynomials; - using FoldingParameters = typename Flavor::FoldingParameters; - ProverPolynomials folded_prover_polynomials; - // TODO(https://github.com/AztecProtocol/barretenberg/issues/656): turn folding data into a struct - std::vector folding_data; - FoldingParameters params; -}; - -template struct VerifierFoldingResult { - using FF = typename Flavor::FF; - using VerificationKey = typename Flavor::VerificationKey; - using FoldingParameters = typename Flavor::FoldingParameters; - std::vector folded_public_inputs; - std::shared_ptr folded_verification_key; - FoldingParameters parameters; -}; - /** - * @brief The aggregated result from the prover and verifier
after a round of folding, used to create a new Instance. + * @brief The result of running the Protogalaxy prover containing a new accumulator (relaxed instance) as well as the + * proof data to instantiate the verifier transcript. * * @tparam Flavor */ template struct FoldingResult { - using FF = typename Flavor::FF; - using ProverPolynomials = typename Flavor::ProverPolynomials; - using VerificationKey = typename Flavor::VerificationKey; - using FoldingParameters = typename Flavor::FoldingParameters; - ProverPolynomials folded_prover_polynomials; - std::vector folded_public_inputs; - std::shared_ptr verification_key; - FoldingParameters folding_parameters; + public: + std::shared_ptr> accumulator; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/656): turn folding data into a struct + std::vector folding_data; }; } // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp index b8543daa64c..4a95f231d52 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.cpp @@ -1,91 +1,306 @@ #include "protogalaxy_prover.hpp" #include "barretenberg/flavor/flavor.hpp" namespace proof_system::honk { +template +void ProtoGalaxyProver_::finalise_and_send_instance(std::shared_ptr instance, + const std::string& domain_separator) +{ + instance->initialize_prover_polynomials(); -template void ProtoGalaxyProver_::prepare_for_folding() + const auto instance_size = static_cast(instance->instance_size); + const auto num_public_inputs = static_cast(instance->public_inputs.size()); + transcript->send_to_verifier(domain_separator + "_instance_size", instance_size); + transcript->send_to_verifier(domain_separator + "_public_input_size", num_public_inputs); + + for (size_t i = 0; i < instance->public_inputs.size(); ++i) { + auto public_input_i = instance->public_inputs[i]; + transcript->send_to_verifier(domain_separator + "_public_input_" + std::to_string(i), public_input_i); + } + transcript->send_to_verifier(domain_separator + "_pub_inputs_offset", + static_cast(instance->pub_inputs_offset)); + + auto& witness_commitments = instance->witness_commitments; + + // Commit to the first three wire polynomials of the instance + // We only commit to the fourth wire polynomial after adding memory records + witness_commitments.w_l = commitment_key->commit(instance->proving_key->w_l); + witness_commitments.w_r = commitment_key->commit(instance->proving_key->w_r); + witness_commitments.w_o = commitment_key->commit(instance->proving_key->w_o); + + auto wire_comms = witness_commitments.get_wires(); + auto commitment_labels = instance->commitment_labels; + auto wire_labels = commitment_labels.get_wires(); + for (size_t idx = 0; idx < 3; ++idx) { + transcript->send_to_verifier(domain_separator + "_" + wire_labels[idx], wire_comms[idx]); + } + + auto eta = transcript->get_challenge(domain_separator + "_eta"); + instance->compute_sorted_accumulator_polynomials(eta); + + // Commit to the sorted witness-table accumulator and the finalized (i.e.
with memory records) fourth wire + // polynomial + witness_commitments.sorted_accum = commitment_key->commit(instance->prover_polynomials.sorted_accum); + witness_commitments.w_4 = commitment_key->commit(instance->prover_polynomials.w_4); + + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.sorted_accum, + witness_commitments.sorted_accum); + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.w_4, witness_commitments.w_4); + + auto [beta, gamma] = transcript->get_challenges(domain_separator + "_beta", domain_separator + "_gamma"); + instance->compute_grand_product_polynomials(beta, gamma); + + witness_commitments.z_perm = commitment_key->commit(instance->prover_polynomials.z_perm); + witness_commitments.z_lookup = commitment_key->commit(instance->prover_polynomials.z_lookup); + + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.z_perm, + instance->witness_commitments.z_perm); + transcript->send_to_verifier(domain_separator + "_" + commitment_labels.z_lookup, + instance->witness_commitments.z_lookup); + + instance->alpha = transcript->get_challenge(domain_separator + "_alpha"); + + auto vk_view = instance->verification_key->get_all(); + auto labels = instance->commitment_labels.get_precomputed(); + for (size_t idx = 0; idx < labels.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_" + labels[idx], vk_view[idx]); + } +} + +template +void ProtoGalaxyProver_::send_accumulator(std::shared_ptr instance, + const std::string& domain_separator) { - auto idx = 0; - for (auto it = instances.begin(); it != instances.end(); it++, idx++) { - auto instance = *it; - instance->initialize_prover_polynomials(); + const auto instance_size = static_cast(instance->instance_size); + const auto num_public_inputs = static_cast(instance->public_inputs.size()); + transcript->send_to_verifier(domain_separator + "_instance_size", instance_size); + transcript->send_to_verifier(domain_separator + "_public_input_size", num_public_inputs); - auto domain_separator = std::to_string(idx); - const auto circuit_size = static_cast(instance->proving_key->circuit_size); - const auto num_public_inputs = static_cast(instance->proving_key->num_public_inputs); + for (size_t i = 0; i < instance->public_inputs.size(); ++i) { + auto public_input_i = instance->public_inputs[i]; + transcript->send_to_verifier(domain_separator + "_public_input_" + std::to_string(i), public_input_i); + } - transcript->send_to_verifier(domain_separator + "_circuit_size", circuit_size); - transcript->send_to_verifier(domain_separator + "_public_input_size", num_public_inputs); - transcript->send_to_verifier(domain_separator + "_pub_inputs_offset", - static_cast(instance->pub_inputs_offset)); + transcript->send_to_verifier(domain_separator + "_eta", instance->relation_parameters.eta); + transcript->send_to_verifier(domain_separator + "_beta", instance->relation_parameters.beta); + transcript->send_to_verifier(domain_separator + "_gamma", instance->relation_parameters.gamma); + transcript->send_to_verifier(domain_separator + "_public_input_delta", + instance->relation_parameters.public_input_delta); + transcript->send_to_verifier(domain_separator + "_lookup_grand_product_delta", + instance->relation_parameters.lookup_grand_product_delta); - for (size_t i = 0; i < instance->proving_key->num_public_inputs; ++i) { - auto public_input_i = instance->public_inputs[i]; - transcript->send_to_verifier(domain_separator + "_public_input_" + std::to_string(i), public_input_i); - } + 
transcript->send_to_verifier(domain_separator + "_alpha", instance->alpha); - auto [eta, beta, gamma] = challenges_to_field_elements(transcript->get_challenges( - domain_separator + "_eta", domain_separator + "_beta", domain_separator + "_gamma")); + auto folding_parameters = instance->folding_parameters; + transcript->send_to_verifier(domain_separator + "_target_sum", folding_parameters.target_sum); + for (size_t idx = 0; idx < folding_parameters.gate_challenges.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_gate_challenge_" + std::to_string(idx), + folding_parameters.gate_challenges[idx]); + } - instance->compute_sorted_accumulator_polynomials(eta); - instance->compute_grand_product_polynomials(beta, gamma); - instance->alpha = transcript->get_challenge(domain_separator + "_alpha"); + auto comm_view = instance->witness_commitments.get_all(); + auto witness_labels = instance->commitment_labels.get_witness(); + for (size_t idx = 0; idx < witness_labels.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_" + witness_labels[idx], comm_view[idx]); } - fold_relation_parameters(instances); - fold_alpha(instances); + auto vk_view = instance->verification_key->get_all(); + auto vk_labels = instance->commitment_labels.get_precomputed(); + for (size_t idx = 0; idx < vk_labels.size(); idx++) { + transcript->send_to_verifier(domain_separator + "_" + vk_labels[idx], vk_view[idx]); + } +} + +template void ProtoGalaxyProver_::prepare_for_folding() +{ + auto idx = 0; + auto instance = instances[0]; + auto domain_separator = std::to_string(idx); + transcript->send_to_verifier(domain_separator + "is_accumulator", instance->is_accumulator); + if (instance->is_accumulator) { + send_accumulator(instance, domain_separator); + } else { + finalise_and_send_instance(instance, domain_separator); + } + idx++; + + for (auto it = instances.begin() + 1; it != instances.end(); it++, idx++) { + auto instance = *it; + auto domain_separator = std::to_string(idx); + finalise_and_send_instance(instance, domain_separator); + } } // TODO(#https://github.com/AztecProtocol/barretenberg/issues/689): finalise implementation this function template -ProverFoldingResult ProtoGalaxyProver_::fold_instances() +FoldingResult ProtoGalaxyProver_::fold_instances() { prepare_for_folding(); + // TODO(#https://github.com/AztecProtocol/barretenberg/issues/740): Handle the case where we are folding for the // first time and accumulator is 0 - // TODO(#https://github.com/AztecProtocol/barretenberg/issues/763): Fold alpha FF delta = transcript->get_challenge("delta"); auto accumulator = get_accumulator(); - auto instance_size = accumulator->prover_polynomials.get_polynomial_size(); - const auto log_instance_size = static_cast(numeric::get_msb(instance_size)); - auto deltas = compute_round_challenge_pows(log_instance_size, delta); + auto deltas = compute_round_challenge_pows(accumulator->log_instance_size, delta); auto perturbator = compute_perturbator(accumulator, deltas); - for (size_t idx = 0; idx <= log_instance_size; idx++) { + for (size_t idx = 0; idx <= accumulator->log_instance_size; idx++) { transcript->send_to_verifier("perturbator_" + std::to_string(idx), perturbator[idx]); } + assert(perturbator[0] == accumulator->folding_parameters.target_sum); + auto perturbator_challenge = transcript->get_challenge("perturbator_challenge"); + instances.next_gate_challenges = + update_gate_challenges(perturbator_challenge, accumulator->folding_parameters.gate_challenges, deltas); + const auto pow_betas_star = + 
compute_pow_polynomial_at_values(instances.next_gate_challenges, accumulator->instance_size); + + combine_relation_parameters(instances); + combine_alpha(instances); + auto combiner = compute_combiner(instances, pow_betas_star); - FF perturbator_challenge = transcript->get_challenge("perturbator_challenge"); auto compressed_perturbator = perturbator.evaluate(perturbator_challenge); - std::vector betas_star(log_instance_size); - betas_star[0] = 1; - auto betas = accumulator->folding_parameters.gate_separation_challenges; - for (size_t idx = 1; idx < log_instance_size; idx++) { - betas_star[idx] = betas[idx] + perturbator_challenge * deltas[idx - 1]; + auto combiner_quotient = compute_combiner_quotient(compressed_perturbator, combiner); + + for (size_t idx = ProverInstances::NUM; idx < ProverInstances::BATCHED_EXTENDED_LENGTH; idx++) { + transcript->send_to_verifier("combiner_quotient_" + std::to_string(idx), combiner_quotient.value_at(idx)); } + auto combiner_challenge = transcript->get_challenge("combiner_quotient_challenge"); - auto pow_betas_star = compute_pow_polynomial_at_values(betas_star, instance_size); + FoldingResult res; + res.accumulator = + compute_next_accumulator(instances, combiner_quotient, combiner_challenge, compressed_perturbator); + res.folding_data = transcript->proof_data; - auto combiner = compute_combiner(instances, pow_betas_star); - auto combiner_quotient = compute_combiner_quotient(compressed_perturbator, combiner); - for (size_t idx = ProverInstances::NUM; idx < combiner.size(); idx++) { - transcript->send_to_verifier("combiner_quotient_" + std::to_string(idx), combiner_quotient.value_at(idx)); + return res; +} +template +std::shared_ptr ProtoGalaxyProver_::compute_next_accumulator( + ProverInstances& instances, + Univariate& combiner_quotient, + const FF& challenge, + const FF& compressed_perturbator) +{ + auto combiner_quotient_at_challenge = combiner_quotient.evaluate(challenge); + + // Given the challenge \gamma, compute Z(\gamma) and {L_0(\gamma),L_1(\gamma)} + // TODO(https://github.com/AztecProtocol/barretenberg/issues/764): Generalize the vanishing polynomial formula + // and the computation of Lagrange basis for k instances + auto vanishing_polynomial_at_challenge = challenge * (challenge - FF(1)); + std::vector lagranges{ FF(1) - challenge, challenge }; + + auto next_accumulator = std::make_shared(); + + // Compute the next target sum and send the next folding parameters to the verifier + auto next_target_sum = + compressed_perturbator * lagranges[0] + vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + next_accumulator->folding_parameters = { instances.next_gate_challenges, next_target_sum }; + transcript->send_to_verifier("next_target_sum", next_target_sum); + for (size_t idx = 0; idx < instances.next_gate_challenges.size(); idx++) { + transcript->send_to_verifier("next_gate_challenge_" + std::to_string(idx), instances.next_gate_challenges[idx]); } - FF combiner_challenge = transcript->get_challenge("combiner_quotient_challenge"); - auto combiner_quotient_at_challenge = combiner_quotient.evaluate(combiner_challenge); - // TODO(https://github.com/AztecProtocol/barretenberg/issues/764): Generalize these formulas as well as computation - // of Lagrange basis - auto vanishing_polynomial_at_challenge = combiner_challenge * (combiner_challenge - FF(1)); - auto lagrange_0_at_challenge = FF(1) - combiner_challenge; + // Allocate space, initialised to 0, for the prover polynomials of the next accumulator + AllPolynomials storage; + for 
(auto& polynomial : storage.get_all()) { + polynomial = typename Flavor::Polynomial(instances[0]->instance_size); + for (auto& value : polynomial) { + value = FF(0); + } + } + ProverPolynomials acc_prover_polynomials; + size_t poly_idx = 0; + auto prover_polynomial_pointers = acc_prover_polynomials.get_all(); + for (auto& polynomial : storage.get_all()) { + prover_polynomial_pointers[poly_idx] = polynomial; + poly_idx++; + } - auto new_target_sum = compressed_perturbator * lagrange_0_at_challenge + - vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + // Fold the prover polynomials + auto acc_poly_views = acc_prover_polynomials.get_all(); + for (size_t inst_idx = 0; inst_idx < ProverInstances::NUM; inst_idx++) { + auto inst_poly_views = instances[inst_idx]->prover_polynomials.get_all(); + for (auto [acc_poly_view, inst_poly_view] : zip_view(acc_poly_views, inst_poly_views)) { + for (size_t poly_idx = 0; poly_idx < inst_poly_view.size(); poly_idx++) { + (acc_poly_view)[poly_idx] += (inst_poly_view)[poly_idx] * lagranges[inst_idx]; + } + } + } + next_accumulator->prover_polynomials = acc_prover_polynomials; - ProverFoldingResult res; - res.params.target_sum = new_target_sum; - res.folding_data = transcript->proof_data; - return res; + // Fold the witness commtiments and send them to the verifier + auto witness_labels = next_accumulator->commitment_labels.get_witness(); + size_t comm_idx = 0; + for (auto& acc_comm : next_accumulator->witness_commitments.get_all()) { + acc_comm = Commitment::infinity(); + size_t inst_idx = 0; + for (auto& instance : instances) { + acc_comm = acc_comm + instance->witness_commitments.get_all()[comm_idx] * lagranges[inst_idx]; + inst_idx++; + } + transcript->send_to_verifier("next_" + witness_labels[comm_idx], acc_comm); + comm_idx++; + } + + // Fold public data ϕ from all instances to produce ϕ* and add it to the transcript. As part of the folding + // verification, the verifier will produce ϕ* as well and check it against what was sent by the prover. + + // Fold the public inputs and send to the verifier + next_accumulator->public_inputs = std::vector(instances[0]->public_inputs.size(), 0); + size_t el_idx = 0; + for (auto& el : next_accumulator->public_inputs) { + size_t inst = 0; + for (auto& instance : instances) { + el += instance->public_inputs[el_idx] * lagranges[inst]; + inst++; + } + transcript->send_to_verifier("next_public_input_" + std::to_string(el_idx), el); + el_idx++; + } + + // Evaluate the combined batching challenge α univariate at challenge to obtain next α and send it to the + // verifier + next_accumulator->alpha = instances.alpha.evaluate(challenge); + transcript->send_to_verifier("next_alpha", next_accumulator->alpha); + + // Evaluate each relation parameter univariate at challenge to obtain the folded relation parameters and send to + // the verifier + auto& combined_relation_parameters = instances.relation_parameters; + auto folded_relation_parameters = proof_system::RelationParameters{ + combined_relation_parameters.eta.evaluate(challenge), + combined_relation_parameters.beta.evaluate(challenge), + combined_relation_parameters.gamma.evaluate(challenge), + combined_relation_parameters.public_input_delta.evaluate(challenge), + combined_relation_parameters.lookup_grand_product_delta.evaluate(challenge), + }; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/805): Add the relation parameters to the transcript + // together. 
+ transcript->send_to_verifier("next_eta", folded_relation_parameters.eta); + transcript->send_to_verifier("next_beta", folded_relation_parameters.beta); + transcript->send_to_verifier("next_gamma", folded_relation_parameters.gamma); + transcript->send_to_verifier("next_public_input_delta", folded_relation_parameters.public_input_delta); + transcript->send_to_verifier("next_lookup_grand_product_delta", + folded_relation_parameters.lookup_grand_product_delta); + next_accumulator->relation_parameters = folded_relation_parameters; + + // Fold the verification key and send it to the verifier as this is part of ϕ as well + auto acc_vk = std::make_shared(instances[0]->prover_polynomials.get_polynomial_size(), + instances[0]->public_inputs.size()); + auto labels = next_accumulator->commitment_labels.get_precomputed(); + size_t vk_idx = 0; + for (auto& vk : acc_vk->get_all()) { + size_t inst = 0; + vk = Commitment::infinity(); + for (auto& instance : instances) { + vk = vk + (instance->verification_key->get_all()[vk_idx]) * lagranges[inst]; + inst++; + } + transcript->send_to_verifier("next_" + labels[vk_idx], vk); + vk_idx++; + } + next_accumulator->verification_key = acc_vk; + + return next_accumulator; } + template class ProtoGalaxyProver_>; template class ProtoGalaxyProver_>; } // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp index bc8a95ae2ee..9c881d82a89 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp @@ -24,6 +24,11 @@ template class ProtoGalaxyProver_ { using ProverPolynomials = typename Flavor::ProverPolynomials; using Relations = typename Flavor::Relations; using AlphaType = typename ProverInstances::AlphaType; + using VerificationKey = typename Flavor::VerificationKey; + using CommitmentKey = typename Flavor::CommitmentKey; + using WitnessCommitments = typename Flavor::WitnessCommitments; + using Commitment = typename Flavor::Commitment; + using AllPolynomials = typename Flavor::AllPolynomials; using BaseUnivariate = Univariate; // The length of ExtendedUnivariate is the largest length (==max_relation_degree + 1) of a univariate polynomial @@ -44,19 +49,48 @@ template class ProtoGalaxyProver_ { ProverInstances instances; std::shared_ptr transcript = std::make_shared(); + std::shared_ptr commitment_key; + ProtoGalaxyProver_() = default; - ProtoGalaxyProver_(ProverInstances insts) - : instances(insts){}; + ProtoGalaxyProver_(const std::vector>& insts, + const std::shared_ptr& commitment_key) + : instances(ProverInstances(insts)) + , commitment_key(std::move(commitment_key)){}; ~ProtoGalaxyProver_() = default; /** - * @brief Prior to folding we need to add all the public inputs to the transcript, labelled by their corresponding - * instance index, compute all the instance's polynomials and record the relation parameters involved in computing - * these polynomials in the transcript. - * + * @brief Prior to folding, we need to finalize the given instances and add all their public data ϕ to the + * transcript, labelled by their corresponding instance index for domain separation. + * TODO(https://github.com/AztecProtocol/barretenberg/issues/795):The rounds prior to actual proving/folding are + * common between decider and folding verifier and could be somehow shared so we do not duplicate code so much. 
*/ void prepare_for_folding(); + /** + * @brief Send the public data of an accumulator, i.e. a relaxed instance, to the verifier (ϕ in the paper). + * + * @param domain_separator separates the same type of data coming from different instances by instance + * index + */ + void send_accumulator(std::shared_ptr, const std::string& domain_separator); + + /** + * @brief For each instance produced by a circuit, prior to folding, we need to complete the computation of its + * prover polynomials, commit to witnesses and generate the relation parameters as well as send the public data ϕ of + * an instance to the verifier. + * + * @param domain_separator separates the same type of data coming from different instances by instance + * index + */ + void finalise_and_send_instance(std::shared_ptr, const std::string& domain_separator); + + /** + * @brief Run the folding prover protocol to produce a new accumulator and a folding proof to be verified by the + * folding verifier. + * + * TODO(https://github.com/AztecProtocol/barretenberg/issues/753): fold goblin polynomials + */ + FoldingResult fold_instances(); /** * @brief Given a vector \vec{\beta} of values, compute the pow polynomial on these values as defined in the paper. */ @@ -89,6 +123,20 @@ template class ProtoGalaxyProver_ { return pows; } + static std::vector update_gate_challenges(const FF perturbator_challenge, + const std::vector& gate_challenges, + const std::vector& round_challenges) + { + auto log_instance_size = gate_challenges.size(); + std::vector next_gate_challenges(log_instance_size); + next_gate_challenges[0] = 1; + + for (size_t idx = 1; idx < log_instance_size; idx++) { + next_gate_challenges[idx] = gate_challenges[idx] + perturbator_challenge * round_challenges[idx - 1]; + } + return next_gate_challenges; + } + // Returns the accumulator, which is the first element in ProverInstances. The accumulator is assumed to have the // FoldingParameters set and be the result of a previous round of folding. // TODO(https://github.com/AztecProtocol/barretenberg/issues/740): handle the case when the accumulator is empty @@ -191,14 +239,12 @@ template class ProtoGalaxyProver_ { { auto full_honk_evaluations = compute_full_honk_evaluations( accumulator->prover_polynomials, accumulator->alpha, accumulator->relation_parameters); - const auto betas = accumulator->folding_parameters.gate_separation_challenges; + const auto betas = accumulator->folding_parameters.gate_challenges; assert(betas.size() == deltas.size()); auto coeffs = construct_perturbator_coefficients(betas, deltas, full_honk_evaluations); return Polynomial(coeffs); } - ProverFoldingResult fold_instances(); - TupleOfTuplesOfUnivariates univariate_accumulators; /** @@ -243,7 +289,7 @@ template class ProtoGalaxyProver_ { ExtendedUnivariateWithRandomization compute_combiner(const ProverInstances& instances, const std::vector& pow_betas_star) { - size_t common_circuit_size = instances[0]->prover_polynomials.get_polynomial_size(); + size_t common_instance_size = instances[0]->instance_size; // Determine number of threads for multithreading. // Note: Multithreading is "on" for every round but we reduce the number of threads from the max available based // For now we use a power of 2 number of threads simply to ensure the round size is evenly divided.
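For concreteness, a worked instance of the threading heuristic implemented just below (the numbers are illustrative, assuming 16 available cores):

    common_instance_size  = 2^16, min_iterations_per_thread = 2^6
    desired_num_threads   = 2^16 / 2^6 = 1024
    num_threads           = min(1024, 16) = 16
    iterations_per_thread = 2^16 / 16 = 4096

so each thread accumulates its own univariate containers over 4096 rows before the per-thread results are combined.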
size_t max_num_threads = get_num_cpus_pow2(); // number of available threads (power of 2) size_t min_iterations_per_thread = 1 << 6; // min number of iterations for which we'll spin up a unique thread - size_t desired_num_threads = common_circuit_size / min_iterations_per_thread; + size_t desired_num_threads = common_instance_size / min_iterations_per_thread; size_t num_threads = std::min(desired_num_threads, max_num_threads); // fewer than max if justified num_threads = num_threads > 0 ? num_threads : 1; // ensure num threads is >= 1 - size_t iterations_per_thread = common_circuit_size / num_threads; // actual iterations per thread + size_t iterations_per_thread = common_instance_size / num_threads; // actual iterations per thread - // Constuct univariate accumulator containers; one per thread + // Construct univariate accumulator containers; one per thread std::vector thread_univariate_accumulators(num_threads); for (auto& accum : thread_univariate_accumulators) { + // just normal relation lengths Utils::zero_univariates(accum); } - // Constuct extended univariates containers; one per thread + // Construct extended univariates containers; one per thread std::vector extended_univariates; extended_univariates.resize(num_threads); @@ -341,21 +388,21 @@ template class ProtoGalaxyProver_ { } /** - * @brief Create folded (univariate) relation parameters. + * @brief Combine each relation parameter, in turn, from all the instances into univariates, used in the computation + * of the combiner. * @details For a given relation parameter type, extract that parameter from each instance, place the values in a * univariate (i.e., sum them against an appropriate univariate Lagrange basis) and then extend it as needed during * the construction of the combiner. */ - static void fold_relation_parameters(ProverInstances& instances) + static void combine_relation_parameters(ProverInstances& instances) { // array of parameters to be computed - auto& folded_parameters = instances.relation_parameters.to_fold; size_t param_idx = 0; - for (auto& folded_parameter : folded_parameters) { + for (auto& folded_parameter : instances.relation_parameters.to_fold) { Univariate tmp(0); size_t instance_idx = 0; for (auto& instance : instances) { - tmp.value_at(instance_idx) = instance->relation_parameters.to_fold[param_idx]; + tmp.value_at(instance_idx) = instance->relation_parameters.to_fold[param_idx].get(); instance_idx++; } folded_parameter.get() = tmp.template extend_to(); @@ -364,14 +411,15 @@ template class ProtoGalaxyProver_ { } /** - * @brief Create folded univariate for the relation batching parameter (alpha). + * @brief Combine the relation batching parameter (named alpha) from each instance into a univariate, used in the + * computation of the combiner. * */ // TODO(https://github.com/AztecProtocol/barretenberg/issues/772): At the moment we have a single α per Instance, we // fold them and then we use the unique folded_α for each folded subrelation that is batched in the combiner. This // is obviously insecure. We need to generate α_i for each subrelation_i, fold them and then use folded_α_i when // batching the i-th folded subrelation in the combiner.
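combine_alpha below follows the same pattern as combine_relation_parameters: place each instance's α at its own evaluation point of a length-NUM univariate, then extend. For NUM = 2 the combined α is simply the line through (0, α_0) and (1, α_1); a sketch of evaluating it under that assumption (not the library's Univariate API):

    // α(0) = α_0 and α(1) = α_1, so α(x) = α_0 + (α_1 - α_0)·x; extending the
    // univariate to further points just tabulates this line at x = 2, 3, ...
    template <typename FF>
    FF combined_alpha_at(const FF& alpha0, const FF& alpha1, const FF& x)
    {
        return alpha0 + (alpha1 - alpha0) * x;
    }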
- static void fold_alpha(ProverInstances& instances) + static void combine_alpha(ProverInstances& instances) { Univariate accumulated_alpha; size_t instance_idx = 0; @@ -381,6 +429,24 @@ template class ProtoGalaxyProver_ { } instances.alpha = accumulated_alpha.template extend_to(); } + + /** + * @brief Compute the next accumulator (ϕ*, ω*, \vec{\beta*}, e*), send the public data ϕ* and the folding parameters + * (\vec{\beta*}, e*) to the verifier and return the complete accumulator + * + * @details At this stage, we assume that the instances have the same size and the same number of public parameters. + * @param instances + * @param combiner_quotient polynomial K in the paper + * @param challenge + * @param compressed_perturbator + * + * TODO(https://github.com/AztecProtocol/barretenberg/issues/796): optimise the construction of the new accumulator + */ + std::shared_ptr compute_next_accumulator( + ProverInstances& instances, + Univariate& combiner_quotient, + const FF& challenge, + const FF& compressed_perturbator); }; extern template class ProtoGalaxyProver_>; diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp index 1aa99dea379..7107d991228 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.cpp @@ -3,57 +3,148 @@ namespace proof_system::honk { template -void ProtoGalaxyVerifier_::prepare_for_folding(std::vector fold_data) +void ProtoGalaxyVerifier_::receive_accumulator(const std::shared_ptr& inst, + const std::string& domain_separator) +{ + inst->instance_size = transcript->template receive_from_prover(domain_separator + "_instance_size"); + inst->log_instance_size = static_cast(numeric::get_msb(inst->instance_size)); + inst->public_input_size = + transcript->template receive_from_prover(domain_separator + "_public_input_size"); + + for (size_t i = 0; i < inst->public_input_size; ++i) { + auto public_input_i = + transcript->template receive_from_prover(domain_separator + "_public_input_" + std::to_string(i)); + inst->public_inputs.emplace_back(public_input_i); + } + + auto eta = transcript->template receive_from_prover(domain_separator + "_eta"); + auto beta = transcript->template receive_from_prover(domain_separator + "_beta"); + auto gamma = transcript->template receive_from_prover(domain_separator + "_gamma"); + auto public_input_delta = transcript->template receive_from_prover(domain_separator + "_public_input_delta"); + auto lookup_grand_product_delta = + transcript->template receive_from_prover(domain_separator + "_lookup_grand_product_delta"); + inst->relation_parameters = + RelationParameters{ eta, beta, gamma, public_input_delta, lookup_grand_product_delta }; + inst->alpha = transcript->template receive_from_prover(domain_separator + "_alpha"); + + inst->folding_parameters.target_sum = + transcript->template receive_from_prover(domain_separator + "_target_sum"); + + inst->folding_parameters.gate_challenges = std::vector(inst->log_instance_size); + for (size_t idx = 0; idx < inst->log_instance_size; idx++) { + inst->folding_parameters.gate_challenges[idx] = + transcript->template receive_from_prover(domain_separator + "_gate_challenge_" + std::to_string(idx)); + } + auto comm_view = inst->witness_commitments.get_all(); + auto witness_labels = inst->commitment_labels.get_witness(); + for (size_t idx = 0; idx < witness_labels.size(); idx++) { + comm_view[idx] = +
transcript->template receive_from_prover(domain_separator + "_" + witness_labels[idx]); + } + + inst->verification_key = std::make_shared(inst->instance_size, inst->public_input_size); + auto vk_view = inst->verification_key->get_all(); + auto vk_labels = inst->commitment_labels.get_precomputed(); + for (size_t idx = 0; idx < vk_labels.size(); idx++) { + vk_view[idx] = transcript->template receive_from_prover(domain_separator + "_" + vk_labels[idx]); + } +} + +template +void ProtoGalaxyVerifier_::receive_and_finalise_instance(const std::shared_ptr& inst, + const std::string& domain_separator) +{ + inst->instance_size = transcript->template receive_from_prover(domain_separator + "_instance_size"); + inst->log_instance_size = static_cast(numeric::get_msb(inst->instance_size)); + inst->public_input_size = + transcript->template receive_from_prover(domain_separator + "_public_input_size"); + + for (size_t i = 0; i < inst->public_input_size; ++i) { + auto public_input_i = + transcript->template receive_from_prover(domain_separator + "_public_input_" + std::to_string(i)); + inst->public_inputs.emplace_back(public_input_i); + } + + inst->pub_inputs_offset = + transcript->template receive_from_prover(domain_separator + "_pub_inputs_offset"); + + auto labels = inst->commitment_labels; + auto& witness_commitments = inst->witness_commitments; + witness_commitments.w_l = transcript->template receive_from_prover(domain_separator + "_" + labels.w_l); + witness_commitments.w_r = transcript->template receive_from_prover(domain_separator + "_" + labels.w_r); + witness_commitments.w_o = transcript->template receive_from_prover(domain_separator + "_" + labels.w_o); + + auto eta = transcript->get_challenge(domain_separator + "_eta"); + witness_commitments.sorted_accum = + transcript->template receive_from_prover(domain_separator + "_" + labels.sorted_accum); + witness_commitments.w_4 = transcript->template receive_from_prover(domain_separator + "_" + labels.w_4); + + auto [beta, gamma] = transcript->get_challenges(domain_separator + "_beta", domain_separator + "_gamma"); + witness_commitments.z_perm = + transcript->template receive_from_prover(domain_separator + "_" + labels.z_perm); + witness_commitments.z_lookup = + transcript->template receive_from_prover(domain_separator + "_" + labels.z_lookup); + + const FF public_input_delta = compute_public_input_delta( + inst->public_inputs, beta, gamma, inst->instance_size, inst->pub_inputs_offset); + const FF lookup_grand_product_delta = compute_lookup_grand_product_delta(beta, gamma, inst->instance_size); + inst->relation_parameters = + RelationParameters{ eta, beta, gamma, public_input_delta, lookup_grand_product_delta }; + + inst->alpha = transcript->get_challenge(domain_separator + "_alpha"); + + inst->verification_key = std::make_shared(inst->instance_size, inst->public_input_size); + auto vk_view = inst->verification_key->get_all(); + auto vk_labels = labels.get_precomputed(); + for (size_t idx = 0; idx < vk_labels.size(); idx++) { + vk_view[idx] = transcript->template receive_from_prover(domain_separator + "_" + vk_labels[idx]); + } +} + +// TODO(https://github.com/AztecProtocol/barretenberg/issues/795): The rounds prior to actual verifying are common +// between decider and folding verifier and could be somehow shared so we do not duplicate code so much. 
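Every transcript entry in the two receive functions above is prefixed with the instance index so that identically named data from different instances cannot collide. A sketch of the labelling convention (the helper is hypothetical; the code inlines the concatenation):

    #include <string>

    // domain_separated("0", "w_l") == "0_w_l" for instance 0,
    // domain_separated("1", "w_l") == "1_w_l" for instance 1, etc.
    std::string domain_separated(const std::string& domain_separator, const std::string& label)
    {
        return domain_separator + "_" + label;
    }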
+template +void ProtoGalaxyVerifier_::prepare_for_folding(const std::vector& fold_data) { transcript = std::make_shared(fold_data); auto index = 0; - for (auto it = verifier_instances.begin(); it != verifier_instances.end(); it++, index++) { + auto inst = instances[0]; + auto domain_separator = std::to_string(index); + inst->is_accumulator = transcript->template receive_from_prover(domain_separator + "is_accumulator"); + if (inst->is_accumulator) { + receive_accumulator(inst, domain_separator); + } else { + receive_and_finalise_instance(inst, domain_separator); + } + index++; + + for (auto it = instances.begin() + 1; it != instances.end(); it++, index++) { auto inst = *it; auto domain_separator = std::to_string(index); - inst->instance_size = transcript->template receive_from_prover(domain_separator + "_circuit_size"); - inst->public_input_size = - transcript->template receive_from_prover(domain_separator + "_public_input_size"); - inst->pub_inputs_offset = - transcript->template receive_from_prover(domain_separator + "_pub_inputs_offset"); - - for (size_t i = 0; i < inst->public_input_size; ++i) { - auto public_input_i = - transcript->template receive_from_prover(domain_separator + "_public_input_" + std::to_string(i)); - inst->public_inputs.emplace_back(public_input_i); - } - auto [eta, beta, gamma] = challenges_to_field_elements(transcript->get_challenges( - domain_separator + "_eta", domain_separator + "_beta", domain_separator + "_gamma")); - - const FF public_input_delta = compute_public_input_delta( - inst->public_inputs, beta, gamma, inst->instance_size, inst->pub_inputs_offset); - const FF lookup_grand_product_delta = compute_lookup_grand_product_delta(beta, gamma, inst->instance_size); - inst->relation_parameters = - RelationParameters{ eta, beta, gamma, public_input_delta, lookup_grand_product_delta }; - inst->alpha = transcript->get_challenge(domain_separator + "_alpha"); + receive_and_finalise_instance(inst, domain_separator); } } template -VerifierFoldingResult ProtoGalaxyVerifier_< - VerifierInstances>::fold_public_parameters(std::vector fold_data) +bool ProtoGalaxyVerifier_::verify_folding_proof(std::vector fold_data) { - using Flavor = typename VerifierInstances::Flavor; - prepare_for_folding(fold_data); - FF delta = transcript->get_challenge("delta"); + + auto delta = transcript->get_challenge("delta"); auto accumulator = get_accumulator(); - auto log_instance_size = static_cast(numeric::get_msb(accumulator->instance_size)); - auto deltas = compute_round_challenge_pows(log_instance_size, delta); - std::vector perturbator_coeffs(log_instance_size + 1); - for (size_t idx = 0; idx <= log_instance_size; idx++) { + auto deltas = compute_round_challenge_pows(accumulator->log_instance_size, delta); + + std::vector perturbator_coeffs(accumulator->log_instance_size + 1); + for (size_t idx = 0; idx <= accumulator->log_instance_size; idx++) { perturbator_coeffs[idx] = transcript->template receive_from_prover("perturbator_" + std::to_string(idx)); } + ASSERT(perturbator_coeffs[0] == accumulator->folding_parameters.target_sum); auto perturbator = Polynomial(perturbator_coeffs); FF perturbator_challenge = transcript->get_challenge("perturbator_challenge"); auto perturbator_at_challenge = perturbator.evaluate(perturbator_challenge); - // Thed degree of K(X) is dk - k - 1 = k(d - 1) - 1. Hence we need k(d - 1) evaluations to represent it. - std::array combiner_quotient_evals = {}; + // The degree of K(X) is dk - k - 1 = k(d - 1) - 1. Hence we need k(d - 1) evaluations to represent it. 
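Unpacking the degree count in the comment above, with k = VerifierInstances::NUM instances and d the maximum relation degree:

    \deg K(X) = dk - k - 1 = k(d - 1) - 1,

so k(d - 1) evaluations determine K(X). They are received at the points k, k + 1, \dots (hence the idx + NUM offset in the labels below), since on \{0, \dots, k - 1\} the vanishing polynomial Z(X) is zero and K cannot be recovered there. The folded target sum is then checked as

    e^* = F(\gamma) \cdot L_0(\gamma) + Z(\gamma) \cdot K(\gamma),

where F(\gamma) is the perturbator evaluated at the challenge, L_0(\gamma) = 1 - \gamma and, for k = 2, Z(\gamma) = \gamma(\gamma - 1) — exactly the expected_next_target_sum computed further down.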
+ std::array combiner_quotient_evals; for (size_t idx = 0; idx < VerifierInstances::BATCHED_EXTENDED_LENGTH - VerifierInstances::NUM; idx++) { combiner_quotient_evals[idx] = transcript->template receive_from_prover( "combiner_quotient_" + std::to_string(idx + VerifierInstances::NUM)); @@ -64,14 +155,99 @@ VerifierFoldingResult ProtoGalaxyVerifier_< auto combiner_quotient_at_challenge = combiner_quotient.evaluate(combiner_challenge); auto vanishing_polynomial_at_challenge = combiner_challenge * (combiner_challenge - FF(1)); - auto lagrange_0_at_challenge = FF(1) - combiner_challenge; + auto lagranges = std::vector{ FF(1) - combiner_challenge, combiner_challenge }; + + // Compute next folding parameters and verify against the ones received from the prover + auto expected_next_target_sum = + perturbator_at_challenge * lagranges[0] + vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + auto next_target_sum = transcript->template receive_from_prover("next_target_sum"); + bool verified = (expected_next_target_sum == next_target_sum); + auto expected_betas_star = + update_gate_challenges(perturbator_challenge, accumulator->folding_parameters.gate_challenges, deltas); + for (size_t idx = 0; idx < accumulator->log_instance_size; idx++) { + auto beta_star = transcript->template receive_from_prover("next_gate_challenge_" + std::to_string(idx)); + verified = verified & (expected_betas_star[idx] == beta_star); + } + + // Compute ϕ* and verify against the data received from the prover + WitnessCommitments acc_witness_commitments; + auto witness_labels = commitment_labels.get_witness(); + size_t comm_idx = 0; + for (auto& expected_comm : acc_witness_commitments.get_all()) { + expected_comm = Commitment::infinity(); + size_t inst = 0; + for (auto& instance : instances) { + expected_comm = expected_comm + instance->witness_commitments.get_all()[comm_idx] * lagranges[inst]; + inst++; + } + auto comm = transcript->template receive_from_prover("next_" + witness_labels[comm_idx]); + verified = verified & (comm == expected_comm); + comm_idx++; + } - auto new_target_sum = perturbator_at_challenge * lagrange_0_at_challenge + - vanishing_polynomial_at_challenge * combiner_quotient_at_challenge; + std::vector folded_public_inputs(instances[0]->public_inputs.size(), 0); + size_t el_idx = 0; + for (auto& expected_el : folded_public_inputs) { + size_t inst = 0; + for (auto& instance : instances) { + expected_el += instance->public_inputs[el_idx] * lagranges[inst]; + inst++; + } + auto el = transcript->template receive_from_prover("next_public_input_" + std::to_string(el_idx)); + verified = verified & (el == expected_el); + el_idx++; + } + + auto expected_alpha = FF(0); + auto expected_parameters = proof_system::RelationParameters{}; + for (size_t inst_idx = 0; inst_idx < VerifierInstances::NUM; inst_idx++) { + auto instance = instances[inst_idx]; + expected_alpha += instance->alpha * lagranges[inst_idx]; + expected_parameters.eta += instance->relation_parameters.eta * lagranges[inst_idx]; + expected_parameters.beta += instance->relation_parameters.beta * lagranges[inst_idx]; + expected_parameters.gamma += instance->relation_parameters.gamma * lagranges[inst_idx]; + expected_parameters.public_input_delta += + instance->relation_parameters.public_input_delta * lagranges[inst_idx]; + expected_parameters.lookup_grand_product_delta += + instance->relation_parameters.lookup_grand_product_delta * lagranges[inst_idx]; + } + + auto next_alpha = transcript->template receive_from_prover("next_alpha"); + verified =
verified & (next_alpha == expected_alpha); + info(verified); + auto next_eta = transcript->template receive_from_prover("next_eta"); + verified = verified & (next_eta == expected_parameters.eta); + info(verified); + + auto next_beta = transcript->template receive_from_prover("next_beta"); + verified = verified & (next_beta == expected_parameters.beta); + + auto next_gamma = transcript->template receive_from_prover("next_gamma"); + verified = verified & (next_gamma == expected_parameters.gamma); + + auto next_public_input_delta = transcript->template receive_from_prover("next_public_input_delta"); + verified = verified & (next_public_input_delta == expected_parameters.public_input_delta); + + auto next_lookup_grand_product_delta = + transcript->template receive_from_prover("next_lookup_grand_product_delta"); + verified = verified & (next_lookup_grand_product_delta == expected_parameters.lookup_grand_product_delta); + + auto acc_vk = std::make_shared(instances[0]->instance_size, instances[0]->public_input_size); + auto vk_labels = commitment_labels.get_precomputed(); + size_t vk_idx = 0; + for (auto& expected_vk : acc_vk->get_all()) { + size_t inst = 0; + expected_vk = Commitment::infinity(); + for (auto& instance : instances) { + expected_vk = expected_vk + instance->verification_key->get_all()[vk_idx] * lagranges[inst]; + inst++; + } + auto vk = transcript->template receive_from_prover("next_" + vk_labels[vk_idx]); + verified = verified & (vk == expected_vk); + vk_idx++; + } - VerifierFoldingResult res; - res.parameters.target_sum = new_target_sum; - return res; + return verified; } template class ProtoGalaxyVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp index 3c5a4ed8ef1..c723532a5b9 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_verifier.hpp @@ -12,17 +12,23 @@ template class ProtoGalaxyVerifier_ { using Flavor = typename VerifierInstances::Flavor; using Transcript = typename Flavor::Transcript; using FF = typename Flavor::FF; + using Commitment = typename Flavor::Commitment; using Instance = typename VerifierInstances::Instance; using VerificationKey = typename Flavor::VerificationKey; + using WitnessCommitments = typename Flavor::WitnessCommitments; + using CommitmentLabels = typename Flavor::CommitmentLabels; + + VerifierInstances instances; - VerifierInstances verifier_instances; std::shared_ptr transcript = std::make_shared(); + CommitmentLabels commitment_labels; + ProtoGalaxyVerifier_(VerifierInstances insts) - : verifier_instances(insts){}; + : instances(insts){}; ~ProtoGalaxyVerifier_() = default; /** - * @brief For a new round challenge δ at each iteration of the ProtoGalaxy protocol, compute the vector + * @brief Given a new round challenge δ for each iteration of the full ProtoGalaxy protocol, compute the vector * [δ, δ^2,..., δ^t] where t = logn and n is the size of the instance. 
*/ static std::vector compute_round_challenge_pows(size_t log_instance_size, FF round_challenge) @@ -35,21 +41,47 @@ template class ProtoGalaxyVerifier_ { return pows; } - std::shared_ptr get_accumulator() { return verifier_instances[0]; } + static std::vector update_gate_challenges(const FF perturbator_challenge, + const std::vector& gate_challenges, + const std::vector& round_challenges) + { + auto log_instance_size = gate_challenges.size(); + std::vector next_gate_challenges(log_instance_size); + next_gate_challenges[0] = 1; + + for (size_t idx = 1; idx < log_instance_size; idx++) { + next_gate_challenges[idx] = gate_challenges[idx] + perturbator_challenge * round_challenges[idx - 1]; + } + return next_gate_challenges; + } + + std::shared_ptr get_accumulator() { return instances[0]; } /** - * @brief Instatiate the VerifierInstances and the VerifierTranscript. + * @brief Instantiate the instances and the transcript. * * @param fold_data The data transmitted via the transcript by the prover. */ - void prepare_for_folding(std::vector fold_data); + void prepare_for_folding(const std::vector&); + + /** + * @brief Instantiate the accumulator (i.e. the relaxed instance) from the transcript. + * + */ + void receive_accumulator(const std::shared_ptr&, const std::string&); + + /** + * @brief Process the public data ϕ for the Instances to be folded. + * + */ + void receive_and_finalise_instance(const std::shared_ptr&, const std::string&); /** - * @brief Run the folding protocol on the verifier side. + * @brief Run the folding protocol on the verifier side to establish whether the public data ϕ of the new + * accumulator, received from the prover, is the same as that produced by the verifier. + * - * TODO(https://github.com/AztecProtocol/barretenberg/issues/690): finalise the implementation of this function */ - VerifierFoldingResult fold_public_parameters(std::vector fold_data); + bool verify_folding_proof(std::vector); }; extern template class ProtoGalaxyVerifier_>; diff --git a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp index da659a321e9..5508a72c292 100644 --- a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp @@ -3,7 +3,7 @@ #include #include "barretenberg/common/constexpr_utils.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/polynomials/polynomial.hpp" #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/relations/relation_types.hpp" @@ -37,11 +37,19 @@ template class DatabusLookupRelationImpl { * @return true * @return false */ - template static bool lookup_exists_at_row(const AllValues& row) + template static bool operation_exists_at_row(const AllValues& row) { return (row.q_busread == 1 || row.calldata_read_counts > 0); } + /** + * @brief Get the lookup inverse polynomial + * + * @tparam AllEntities + * @param in + * @return auto& + */ + template static auto& get_inverse_polynomial(AllEntities& in) { return in.lookup_inverses; } /** * @brief Compute the Accumulator whose values indicate whether the inverse is computed or not * @details This is needed for efficiency since we don't need to compute the inverse unless the log derivative @@ -154,7 +162,7 @@ template class DatabusLookupRelationImpl { /** * @brief Accumulate the contribution from two subrelations for
the log derivative databus lookup argument - * @details See lookup_library.hpp for details of the generic log-derivative lookup argument + * @details See logderivative_library.hpp for details of the generic log-derivative lookup argument * * @param accumulator transformed to `evals + C(in(X)...)*scaling_factor` * @param in an std::array containing the fully extended Accumulator edges. @@ -167,9 +175,9 @@ template class DatabusLookupRelationImpl { const Parameters& params, const FF& scaling_factor) { - honk::lookup_library::accumulate_logderivative_lookup_subrelation_contributions>( - accumulator, in, params, scaling_factor); + honk::logderivative_library:: + accumulate_logderivative_lookup_subrelation_contributions>( + accumulator, in, params, scaling_factor); } }; diff --git a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp index 1daf3469bc7..e52e297cfa4 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp +++ b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp @@ -1,6 +1,6 @@ #include "barretenberg/flavor/ecc_vm.hpp" #include "barretenberg/flavor/relation_definitions_fwd.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "ecc_msm_relation.hpp" namespace proof_system::honk::sumcheck { @@ -25,7 +25,7 @@ void ECCVMLookupRelationImpl::accumulate(ContainerOverSubrelations& accumula const Parameters& params, const FF& scaling_factor) { - lookup_library::accumulate_logderivative_lookup_subrelation_contributions>( + logderivative_library::accumulate_logderivative_lookup_subrelation_contributions>( accumulator, in, params, scaling_factor); } diff --git a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp index 35af59f7490..aa3afffc87f 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp @@ -24,12 +24,21 @@ template class ECCVMLookupRelationImpl { static constexpr std::array SUBRELATION_LINEARLY_INDEPENDENT = { true, false }; - template static bool lookup_exists_at_row(const AllValues& row) + template static bool operation_exists_at_row(const AllValues& row) { return (row.msm_add == 1) || (row.msm_skew == 1) || (row.precompute_select == 1); } + /** + * @brief Get the inverse lookup polynomial + * + * @tparam AllEntities + * @param in + * @return auto& + */ + template static auto& get_inverse_polynomial(AllEntities& in) { return in.lookup_inverses; } + template static Accumulator compute_inverse_exists(const AllEntities& in) { diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/Fib.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/Fib.hpp deleted file mode 100644 index 428c6a1208b..00000000000 --- a/barretenberg/cpp/src/barretenberg/relations/generated/Fib.hpp +++ /dev/null @@ -1,64 +0,0 @@ - -#pragma once -#include "../relation_parameters.hpp" -#include "../relation_types.hpp" - -namespace proof_system::Fib_vm { - -template struct Row { - FF Fibonacci_LAST{}; - FF Fibonacci_FIRST{}; - FF Fibonacci_x{}; - FF Fibonacci_y{}; - FF Fibonacci_x_shift{}; - FF Fibonacci_y_shift{}; -}; - -#define DECLARE_VIEWS(index) \ - using View = typename std::tuple_element::type; \ - [[maybe_unused]] auto Fibonacci_LAST = 
View(new_term.Fibonacci_LAST); \ - [[maybe_unused]] auto Fibonacci_FIRST = View(new_term.Fibonacci_FIRST); \ - [[maybe_unused]] auto Fibonacci_x = View(new_term.Fibonacci_x); \ - [[maybe_unused]] auto Fibonacci_y = View(new_term.Fibonacci_y); \ - [[maybe_unused]] auto Fibonacci_x_shift = View(new_term.Fibonacci_x_shift); \ - [[maybe_unused]] auto Fibonacci_y_shift = View(new_term.Fibonacci_y_shift); - -template class FibImpl { - public: - using FF = FF_; - - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ - 4, - 4, - }; - - template - void static accumulate(ContainerOverSubrelations& evals, - const AllEntities& new_term, - [[maybe_unused]] const RelationParameters&, - [[maybe_unused]] const FF& scaling_factor) - { - - // Contribution 0 - { - DECLARE_VIEWS(0); - - auto tmp = (((-Fibonacci_FIRST + FF(1)) * (-Fibonacci_LAST + FF(1))) * (Fibonacci_x_shift - Fibonacci_y)); - tmp *= scaling_factor; - std::get<0>(evals) += tmp; - } - // Contribution 1 - { - DECLARE_VIEWS(1); - - auto tmp = (((-Fibonacci_FIRST + FF(1)) * (-Fibonacci_LAST + FF(1))) * - (Fibonacci_y_shift - (Fibonacci_x + Fibonacci_y))); - tmp *= scaling_factor; - std::get<1>(evals) += tmp; - } - } -}; - -template using Fib = Relation>; - -} // namespace proof_system::Fib_vm \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.cpp b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.cpp new file mode 100644 index 00000000000..1822c388c4e --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.cpp @@ -0,0 +1,34 @@ +#include "generic_permutation_relation.hpp" +#include "barretenberg/flavor/relation_definitions_fwd.hpp" +#include "barretenberg/flavor/toy_avm.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" +#include "relation_definer.hpp" + +namespace proof_system::honk::sumcheck { + +/** + * @brief Expression for generic log-derivative-based set permutation. + * @param accumulator transformed to `evals + C(in(X)...)*scaling_factor` + * @param in an std::array containing the fully extended Accumulator edges. + * @param relation_params contains beta, gamma, and public_input_delta, .... + * @param scaling_factor optional term to scale the evaluation before adding to evals. 
+ */ +template +template +void GenericPermutationRelationImpl::accumulate(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor) +{ + logderivative_library::accumulate_logderivative_permutation_subrelation_contributions< + FF, + GenericPermutationRelationImpl>(accumulator, in, params, scaling_factor); +} + +// template class GenericPermutationRelationImpl; +// template +// using GenericPermutationRelationExampleSettingsImpl = GenericPermutationRelationImpl; DEFINE_SUMCHECK_RELATION_CLASS(GenericPermutationRelationExampleSettingsImpl, flavor::AVMTemplate); + +DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(GenericPermutationRelationImpl, flavor::ToyAVM); +} // namespace proof_system::honk::sumcheck diff --git a/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.hpp new file mode 100644 index 00000000000..d4246a423f5 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.hpp @@ -0,0 +1,210 @@ +/** + * @file generic_permutation_relation.hpp + * @author Rumata888 + * @brief This file contains the template for the generic permutation that can be specialized to enforce various + * permutations (for explanation on how to define them, see "relation_definer.hpp") + * + */ +#pragma once +#include +#include + +#include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/polynomials/univariate.hpp" +#include "barretenberg/relations/relation_types.hpp" + +namespace proof_system::honk::sumcheck { +/** + * @brief Specifies positions of elements in the tuple of entities received from methods in the Settings class + * + */ +enum GenericPermutationSettingIndices { + INVERSE_POLYNOMIAL_INDEX, /* The index of the inverse polynomial*/ + ENABLE_INVERSE_CORRECTNESS_CHECK_POLYNOMIAL_INDEX, /* The index of the polynomial enabling first subrelation*/ + FIRST_PERMUTATION_SET_ENABLE_POLYNOMIAL_INDEX, /* The index of the polynomial that adds an element from the first + set to the sum*/ + SECOND_PERMUTATION_SET_ENABLE_POLYNOMIAL_INDEX, /* The index of the polynomial that adds an element from the second + set to the sum*/ + + PERMUTATION_SETS_START_POLYNOMIAL_INDEX, /* The starting index of the polynomials that are used in the permutation + sets*/ +}; + +template class GenericPermutationRelationImpl { + public: + using FF = FF_; + // Read and write terms counts should stay set to 1 unless we want to permute several columns at once as accumulated + // sets (not as tuples). + static constexpr size_t READ_TERMS = 1; + static constexpr size_t WRITE_TERMS = 1; + // 1 + polynomial degree of this relation + static constexpr size_t LENGTH = READ_TERMS + WRITE_TERMS + 3; // 5 + + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + LENGTH, // inverse polynomial correctness sub-relation + LENGTH // log-derived terms subrelation + }; + + /** + * @brief We apply the power polynomial only to the first subrelation + * + *@details The first subrelation establishes correspondence between the inverse polynomial elements and the terms. 
+ *The second relation computes the inverses of individual terms, which are then summed up with sumcheck + * + */ + static constexpr std::array SUBRELATION_LINEARLY_INDEPENDENT = { true, false }; + + /** + * @brief Check if we need to compute the inverse polynomial element value for this row + * @details This proxies to a method in the Settings class + * + * @param row All values at row + */ + template static bool operation_exists_at_row(const AllValues& row) + + { + return Settings::inverse_polynomial_is_computed_at_row(row); + } + + /** + * @brief Get the inverse permutation polynomial (needed to compute its value) + * + */ + template static auto& get_inverse_polynomial(AllEntities& in) + { + // WIRE containing the inverse of the product of terms at this row. Used to reconstruct individual inverted + // terms + return std::get(Settings::get_nonconst_entities(in)); + } + + /** + * @brief Get selector/wire switching on (1) or off (0) inverse computation + * + */ + template + static Accumulator compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + + // WIRE/SELECTOR enabling the permutation used in the sumcheck computation. This affects the first subrelation + return Accumulator( + View(std::get(Settings::get_const_entities(in)))); + } + + /** + * @brief Compute if the value from the first set exists in this row + * + * @tparam read_index Kept for compatibility with lookups, behavior doesn't change + */ + template + static Accumulator compute_read_term_predicate(const AllEntities& in) + + { + static_assert(read_index < READ_TERMS); + using View = typename Accumulator::View; + + // The selector/wire value that determines that an element from the first set needs to be included. Can be + // different from the wire used in the write part. + return Accumulator( + View(std::get(Settings::get_const_entities(in)))); + } + + /** + * @brief Compute if the value from the second set exists in this row + * + * @tparam write_index Kept for compatibility with lookups, behavior doesn't change + */ + template + static Accumulator compute_write_term_predicate(const AllEntities& in) + { + static_assert(write_index < WRITE_TERMS); + using View = typename Accumulator::View; + + // The selector/wire value that determines that an element from the second set needs to be included. Can be + // different from the wire used in the read part.
+ return Accumulator( + View(std::get(Settings::get_const_entities(in)))); + } + + /** + * @brief Compute the value of a single item in the set + * + * @details Computes the polynomial \gamma + \sum_{i=0}^{num_columns}(column_i*\beta^i), so the tuple of columns is + * in the first set + * + * @tparam read_index Kept for compatibility with lookups, behavior doesn't change + * + * @param params Used for beta and gamma + */ + template + static Accumulator compute_read_term(const AllEntities& in, const Parameters& params) + { + using View = typename Accumulator::View; + + static_assert(read_index < READ_TERMS); + + // Retrieve all polynomials used + const auto all_polynomials = Settings::get_const_entities(in); + + auto result = Accumulator(0); + + // Iterate over tuple and sum as a polynomial over beta + barretenberg::constexpr_for( + [&]() { result = result * params.beta + View(std::get(all_polynomials)); }); + + const auto& gamma = params.gamma; + return result + gamma; + } + + /** + * @brief Compute the value of a single item in the set + * + * @details Computes the polynomial \gamma + \sum_{i=0}^{num_columns}(column_i*\beta^i), so the tuple of columns is + * in the second set + * + * @tparam write_index Kept for compatibility with lookups, behavior doesn't change + * + * @param params Used for beta and gamma + */ + template + static Accumulator compute_write_term(const AllEntities& in, const Parameters& params) + { + using View = typename Accumulator::View; + + static_assert(write_index < WRITE_TERMS); + + // Get all used entities + const auto& used_entities = Settings::get_const_entities(in); + + auto result = Accumulator(0); + // Iterate over tuple and sum as a polynomial over beta + barretenberg::constexpr_for( + [&]() { result = result * params.beta + View(std::get(used_entities)); }); + + const auto& gamma = params.gamma; + return result + gamma; + } + + /** + * @brief Expression for generic log-derivative-based set permutation. + * @param accumulator transformed to `evals + C(in(X)...)*scaling_factor` + * @param in an std::array containing the fully extended Accumulator edges. + * @param relation_params contains beta, gamma, and public_input_delta, .... + * @param scaling_factor optional term to scale the evaluation before adding to evals. 
+ */ + template + static void accumulate(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor); +}; + +template +using GenericPermutationRelation = Relation>; + +} // namespace proof_system::honk::sumcheck diff --git a/barretenberg/cpp/src/barretenberg/relations/toy_avm/relation_definer.hpp b/barretenberg/cpp/src/barretenberg/relations/toy_avm/relation_definer.hpp new file mode 100644 index 00000000000..4771c1260b7 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/toy_avm/relation_definer.hpp @@ -0,0 +1,213 @@ +/** + * @file relation_definer.hpp + * @author Rumata888 + * @brief This file contains settings for the General Permutation Relation implementations and (in the future) Lookup + * implementations + * + */ +#pragma once +#include +#include +namespace proof_system::honk::sumcheck { + +/** + * @brief This class contains an example of how to set PermutationSettings classes used by the + * GenericPermutationRelationImpl class to specify a concrete permutation + * + * @details To create your own permutation: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your permutation + * 3) Update "DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to include the new + * settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example, + * `using Relations = std::tuple>;`) + * + */ +class ExampleTuplePermutationSettings { + public: + // This constant defines how many columns are bundled together to form each set. For example, in this case we are + // bundling tuples of (permutation_set_column_1, permutation_set_column_2) to be a permutation of + // (permutation_set_column_3, permutation_set_column_4). As the tuple has 2 elements, set the value to 2 + constexpr static size_t COLUMNS_PER_SET = 2; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial value needs to be computed at + * this index. Otherwise the value needs to be set to zero. + * + * @details If this is true then permutation takes place in this row + * + */ + template static inline bool inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.enable_tuple_set_permutation == 1); + } + + /** + * @brief Get all the entities for the permutation when we don't need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!)
+ * + * @return All the entities needed for the permutation + */ + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple( + in.tuple_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_tuple_set_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_tuple_set_permutation, /* Enables adding first set to the sum */ + in.enable_tuple_set_permutation, /* Enables adding second set to the sum */ + in.permutation_set_column_3, /* The first entry in the first set tuple */ + in.permutation_set_column_4, /* The second entry in the first set tuple */ + in.permutation_set_column_1, /* The first entry in the second set tuple */ + in.permutation_set_column_2); /* The second entry in the second set tuple */ + } + + /** + * @brief Get all the entities for the permutation when need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!) + * + * @return All the entities needed for the permutation + */ + template static inline auto get_nonconst_entities(AllEntities& in) + { + return std::forward_as_tuple( + in.tuple_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_tuple_set_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_tuple_set_permutation, /* Enables adding first set to the sum */ + in.enable_tuple_set_permutation, /* Enables adding second set to the sum */ + in.permutation_set_column_3, /* The first entry in the first set tuple */ + in.permutation_set_column_4, /* The second entry in the first set tuple */ + in.permutation_set_column_1, /* The first entry in the second set tuple */ + in.permutation_set_column_2); /* The second entry in the second set tuple */ + } +}; + +/** + * @brief This class contains an example of how to set PermutationSettings classes used by the + * GenericPermutationRelationImpl class to specify a concrete permutation + * + * @details To create your own permutation: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your permutation + * 3) Update "DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to include the new + * settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class ExampleSameWirePermutationSettings { + public: + // This constant defines how many columns are bundled together to form each set. 
For example, in this case we are + // permuting entries in the column with itself (self_permutation_column), so we choose just one + constexpr static size_t COLUMNS_PER_SET = 1; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial value needs to be computed at + * this index. Otherwise the value needs to be set to zero. + * + * @details If this is true then permutation takes place in this row + * + */ + template static inline bool inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.enable_single_column_permutation == 1); + } + + /** + * @brief Get all the entities for the permutation when we don't need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!) + * + * @return All the entities needed for the permutation + */ + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple( + in.single_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_single_column_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_first_set_permutation, /* Enables adding first set to the sum */ + in.enable_second_set_permutation, /* Enables adding second set to the sum */ + in.self_permutation_column, /* The first set column */ + in.self_permutation_column /* The second set column which in this case is the same as the first set column + */ + ); + } + + /** + * @brief Get all the entities for the permutation when we need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!)
+ * + * @return All the entities needed for the permutation + */ + template static inline auto get_nonconst_entities(AllEntities& in) + { + return std::forward_as_tuple( + in.single_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_single_column_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_first_set_permutation, /* Enables adding first set to the sum */ + in.enable_second_set_permutation, /* Enables adding second set to the sum */ + in.self_permutation_column, /* The first set column */ + in.self_permutation_column /* The second set column which in this case is the same as the first set column + */ + ); + } +}; + +#define DEFINE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, Settings) \ + template class RelationImplementation; \ + template using RelationImplementation##Settings = RelationImplementation; \ + DEFINE_SUMCHECK_RELATION_CLASS(RelationImplementation##Settings, flavor); + +#define DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(RelationImplementation, flavor) \ + DEFINE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleTuplePermutationSettings); \ + DEFINE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleSameWirePermutationSettings); + +#define DECLARE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, Settings) \ + extern template class RelationImplementation; \ + template using RelationImplementation##Settings = RelationImplementation; \ + DECLARE_SUMCHECK_RELATION_CLASS(RelationImplementation##Settings, flavor); + +#define DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(RelationImplementation, flavor) \ + DECLARE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleTuplePermutationSettings); \ + DECLARE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleSameWirePermutationSettings); +} // namespace proof_system::honk::sumcheck \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp index 18e2cbe129e..5111ea71a54 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/primitives/bigfield/bigfield.fuzzer.hpp @@ -266,7 +266,7 @@ template class BigFieldBase { mask = (uint256_t(1) << mask_size) - 1; // Choose the bit range // Return instruction - return { .id = instruction_opcode, .arguments.element = Element(temp & mask) }; + return { .id = instruction_opcode, .arguments.element = Element(static_cast(temp & mask)) }; break; case OPCODE::RANDOMSEED: diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt index 7ba574b2604..3b7a634c740 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(stdlib_recursion ecc proof_system stdlib_primitives stdlib_pedersen_commitment stdlib_blake3s ultra_honk) \ No newline at end of file +barretenberg_module(stdlib_recursion ecc proof_system stdlib_primitives stdlib_pedersen_commitment stdlib_blake3s ultra_honk eccvm translator_vm) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.cpp 
b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.cpp new file mode 100644 index 00000000000..f04c32c9583 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.cpp @@ -0,0 +1,87 @@ +#include "barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.hpp" + +namespace proof_system::plonk::stdlib::recursion::goblin { + +template +MergeRecursiveVerifier_::MergeRecursiveVerifier_(CircuitBuilder* builder) + : builder(builder) +{} + +/** + * @brief Construct recursive verifier for Goblin Merge protocol, up to but not including the pairing + * + * @tparam Flavor + * @param proof + * @return std::array Inputs to final pairing + */ +template +std::array::Element, 2> MergeRecursiveVerifier_::verify_proof( + const plonk::proof& proof) +{ + transcript = std::make_shared(builder, proof.proof_data); + + // Receive commitments [t_i^{shift}], [T_{i-1}], and [T_i] + std::array C_T_prev; + std::array C_t_shift; + std::array C_T_current; + for (size_t idx = 0; idx < NUM_WIRES; ++idx) { + C_T_prev[idx] = transcript->template receive_from_prover("T_PREV_" + std::to_string(idx + 1)); + C_t_shift[idx] = transcript->template receive_from_prover("t_SHIFT_" + std::to_string(idx + 1)); + C_T_current[idx] = transcript->template receive_from_prover("T_CURRENT_" + std::to_string(idx + 1)); + } + + FF kappa = transcript->get_challenge("kappa"); + + // Receive transcript poly evaluations and add corresponding univariate opening claims {(\kappa, p(\kappa)), [p(X)]} + std::array T_prev_evals; + std::array t_shift_evals; + std::array T_current_evals; + std::vector opening_claims; + for (size_t idx = 0; idx < NUM_WIRES; ++idx) { + T_prev_evals[idx] = transcript->template receive_from_prover("T_prev_eval_" + std::to_string(idx + 1)); + opening_claims.emplace_back(OpeningClaim{ { kappa, T_prev_evals[idx] }, C_T_prev[idx] }); + } + for (size_t idx = 0; idx < NUM_WIRES; ++idx) { + t_shift_evals[idx] = transcript->template receive_from_prover("t_shift_eval_" + std::to_string(idx + 1)); + opening_claims.emplace_back(OpeningClaim{ { kappa, t_shift_evals[idx] }, C_t_shift[idx] }); + } + for (size_t idx = 0; idx < NUM_WIRES; ++idx) { + T_current_evals[idx] = + transcript->template receive_from_prover("T_current_eval_" + std::to_string(idx + 1)); + opening_claims.emplace_back(OpeningClaim{ { kappa, T_current_evals[idx] }, C_T_current[idx] }); + } + + // Check the identity T_i(\kappa) = T_{i-1}(\kappa) + t_i^{shift}(\kappa) + for (size_t idx = 0; idx < NUM_WIRES; ++idx) { + T_current_evals[idx].assert_equal(T_prev_evals[idx] + t_shift_evals[idx]); + } + + FF alpha = transcript->get_challenge("alpha"); + + // Construct batched commitment and batched evaluation from constituents using batching challenge \alpha + std::vector scalars; + std::vector commitments; + scalars.emplace_back(FF(builder, 1)); + commitments.emplace_back(opening_claims[0].commitment); + auto batched_eval = opening_claims[0].opening_pair.evaluation; + auto alpha_pow = alpha; + for (size_t idx = 1; idx < opening_claims.size(); ++idx) { + auto& claim = opening_claims[idx]; + scalars.emplace_back(alpha_pow); + commitments.emplace_back(claim.commitment); + batched_eval += alpha_pow * claim.opening_pair.evaluation; + alpha_pow *= alpha; + } + + auto batched_commitment = Commitment::batch_mul(commitments, scalars); + + OpeningClaim batched_claim = { { kappa, batched_eval }, batched_commitment }; + + auto pairing_points = KZG::compute_pairing_points(batched_claim,
transcript); + + return pairing_points; +} + +template class MergeRecursiveVerifier_; + +} // namespace proof_system::plonk::stdlib::recursion::goblin diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.hpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.hpp new file mode 100644 index 00000000000..341d91a1bd1 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.hpp @@ -0,0 +1,31 @@ +#pragma once +#include "barretenberg/commitment_schemes/kzg/kzg.hpp" +#include "barretenberg/plonk/proof_system/types/proof.hpp" +#include "barretenberg/stdlib/primitives/curves/bn254.hpp" +#include "barretenberg/stdlib/recursion/honk/transcript/transcript.hpp" + +namespace proof_system::plonk::stdlib::recursion::goblin { +template class MergeRecursiveVerifier_ { + public: + using Curve = bn254; + using FF = typename Curve::ScalarField; + using Commitment = typename Curve::Element; + using GroupElement = typename Curve::Element; + using KZG = ::proof_system::honk::pcs::kzg::KZG; + using OpeningClaim = ::proof_system::honk::pcs::OpeningClaim; + using PairingPoints = std::array; + using Transcript = honk::Transcript; + + CircuitBuilder* builder; + std::shared_ptr transcript; + + static constexpr size_t NUM_WIRES = arithmetization::UltraHonk::NUM_WIRES; + + explicit MergeRecursiveVerifier_(CircuitBuilder* builder); + + PairingPoints verify_proof(const plonk::proof& proof); +}; + +extern template class MergeRecursiveVerifier_; + +} // namespace proof_system::plonk::stdlib::recursion::goblin diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_verifier.test.cpp new file mode 100644 index 00000000000..39b981c4b6a --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/merge_verifier.test.cpp @@ -0,0 +1,100 @@ +#include "barretenberg/common/test.hpp" +#include "barretenberg/goblin/mock_circuits.hpp" +#include "barretenberg/stdlib/primitives/curves/bn254.hpp" +#include "barretenberg/stdlib/recursion/honk/verifier/merge_recursive_verifier.hpp" +#include "barretenberg/ultra_honk/ultra_composer.hpp" + +namespace proof_system::plonk::stdlib::recursion::goblin { + +/** + * @brief Test suite for recursive verification of Goblin Merge proofs + * @details The recursive verification circuit is arithmetized using Goblin-style Ultra arithmetization + * (GoblinUltraCircuitBuilder). 
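+ * + * The protocol under test (implemented in merge_recursive_verifier.cpp above) receives commitments to the + * previous, shifted, and current op queue transcript polynomials, checks the identity + * T_i(\kappa) = T_{i-1}(\kappa) + t_i^{shift}(\kappa) at a random challenge \kappa, and reduces the resulting + * univariate opening claims to a single batched KZG claim using powers of a second challenge \alpha.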
+ * + * @tparam Builder + */ +class RecursiveMergeVerifierTest : public testing::Test { + + // Types for recursive verifier circuit + using RecursiveBuilder = GoblinUltraCircuitBuilder; + using RecursiveMergeVerifier = MergeRecursiveVerifier_; + + // Define types relevant for inner circuit + using GoblinUltraFlavor = ::proof_system::honk::flavor::GoblinUltra; + using GoblinUltraComposer = ::proof_system::honk::UltraComposer_; + using InnerFlavor = GoblinUltraFlavor; + using InnerComposer = GoblinUltraComposer; + using InnerBuilder = typename InnerComposer::CircuitBuilder; + + // Define additional types for testing purposes + using Commitment = InnerFlavor::Commitment; + using FF = InnerFlavor::FF; + using VerifierCommitmentKey = ::proof_system::honk::pcs::VerifierCommitmentKey; + + public: + static void SetUpTestSuite() { barretenberg::srs::init_crs_factory("../srs_db/ignition"); } + + /** + * @brief Test recursive merge verification for the ops generated by a sample circuit + * @details We construct and verify an Ultra Honk proof of the recursive merge verifier circuit to check its + * correctness rather than calling check_circuit since this functionality is incomplete for the Goblin + * arithmetization + */ + static void test_recursive_merge_verification() + { + auto op_queue = std::make_shared(); + + InnerBuilder sample_circuit{ op_queue }; + GoblinTestingUtils::construct_simple_initial_circuit(sample_circuit); + + // Generate a proof over the inner circuit + InnerComposer inner_composer; + auto merge_prover = inner_composer.create_merge_prover(op_queue); + auto merge_proof = merge_prover.construct_proof(); + + // Create a recursive merge verification circuit for the merge proof + RecursiveBuilder outer_circuit; + RecursiveMergeVerifier verifier{ &outer_circuit }; + auto pairing_points = verifier.verify_proof(merge_proof); + + // Check for a failure flag in the recursive verifier circuit + EXPECT_EQ(outer_circuit.failed(), false) << outer_circuit.err(); + + // Check 1: Perform native merge verification then perform the pairing on the outputs of the recursive merge + // verifier and check that the result agrees. + auto native_verifier = inner_composer.create_merge_verifier(); + bool verified_native = native_verifier.verify_proof(merge_proof); + VerifierCommitmentKey pcs_verification_key(0, srs::get_crs_factory()); + auto verified_recursive = + pcs_verification_key.pairing_check(pairing_points[0].get_value(), pairing_points[1].get_value()); + EXPECT_EQ(verified_native, verified_recursive); + EXPECT_TRUE(verified_recursive); + + // Check 2: Ensure that the underlying native and recursive merge verification algorithms agree by ensuring + // the manifests produced by each agree. 
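+ // (The manifest is an ordered, per-round record of the elements sent to or received from the prover and the + // challenges generated, so entry-by-entry equality implies both verifiers perform the same Fiat-Shamir + // interactions in the same order.)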
+ auto recursive_manifest = verifier.transcript->get_manifest(); + auto native_manifest = native_verifier.transcript->get_manifest(); + for (size_t i = 0; i < recursive_manifest.size(); ++i) { + EXPECT_EQ(recursive_manifest[i], native_manifest[i]); + } + + // Check 3: Construct and verify a (goblin) ultra honk proof of the Merge recursive verifier circuit + { + GoblinUltraComposer composer; + auto instance = composer.create_instance(outer_circuit); + auto prover = composer.create_prover(instance); + auto verifier = composer.create_verifier(instance); + auto proof = prover.construct_proof(); + bool verified = verifier.verify_proof(proof); + + EXPECT_TRUE(verified); + } + } +}; + +TEST_F(RecursiveMergeVerifierTest, SingleRecursiveVerification) +{ + RecursiveMergeVerifierTest::test_recursive_merge_verification(); +}; + +} // namespace proof_system::plonk::stdlib::recursion::goblin \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp index 785f2c8331a..6c303d15e25 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp @@ -8,8 +8,8 @@ namespace proof_system::plonk::stdlib::recursion::honk { template -UltraRecursiveVerifier_::UltraRecursiveVerifier_(Builder* builder, - std::shared_ptr native_verifier_key) +UltraRecursiveVerifier_::UltraRecursiveVerifier_( + Builder* builder, const std::shared_ptr& native_verifier_key) : key(std::make_shared(builder, native_verifier_key)) , builder(builder) {} diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp index 67b5411025b..62803746d5d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp @@ -19,7 +19,8 @@ template class UltraRecursiveVerifier_ { using Builder = typename Flavor::CircuitBuilder; using PairingPoints = std::array; - explicit UltraRecursiveVerifier_(Builder* builder, std::shared_ptr native_verifier_key); + explicit UltraRecursiveVerifier_(Builder* builder, + const std::shared_ptr& native_verifier_key); UltraRecursiveVerifier_(UltraRecursiveVerifier_&& other) = delete; UltraRecursiveVerifier_(const UltraRecursiveVerifier_& other) = delete; UltraRecursiveVerifier_& operator=(const UltraRecursiveVerifier_& other) = delete; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp index 1fc658eae9c..0fcc8d27614 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/instances.hpp @@ -8,6 +8,7 @@ template struct ProverInstances_ { public: static_assert(NUM_ > 0, "Must have at least one prover instance"); using Flavor = Flavor_; + using FoldingParameters = typename Flavor::FoldingParameters; using FF = typename Flavor::FF; static constexpr size_t NUM = NUM_; using Instance = ProverInstance_; @@ -21,6 +22,7 @@ template struct ProverInstances_ { ArrayType _data; RelationParameters relation_parameters; AlphaType alpha; + std::vector next_gate_challenges; 
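+ // (next_gate_challenges: gate challenges for the subsequent folding round; assumed to be populated during + // folding rather than at construction.)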
std::shared_ptr const& operator[](size_t idx) const { return _data[idx]; } typename ArrayType::iterator begin() { return _data.begin(); }; @@ -54,16 +56,15 @@ template struct ProverInstances_ { */ std::vector> row_to_univariates(size_t row_idx) const { - auto instance_polynomial_views = get_polynomial_views(); + auto insts_prover_polynomials_views = get_polynomials_views(); std::vector> results; - // Initialize to our amount of columns - results.resize(instance_polynomial_views[0].size()); + // Set the size corresponding to the number of columns (prover polynomials) in the execution trace + results.resize(insts_prover_polynomials_views[0].size()); size_t instance_idx = 0; - // Iterate instances - for (auto& get_all : instance_polynomial_views) { - // Iterate columns + // Iterate over the prover polynomials' views corresponding to each instance + for (auto& get_all : insts_prover_polynomials_views) { + // Iterate over all columns in the execution trace of an instance and extract their value at row_idx. for (auto [result, poly_ptr] : zip_view(results, get_all)) { - // Assign row for each instance result.evaluations[instance_idx] = (poly_ptr)[row_idx]; } instance_idx++; @@ -72,9 +73,10 @@ template struct ProverInstances_ { } private: - auto get_polynomial_views() const + // Returns a vector containing pointer views to the prover polynomials corresponding to each instance. + auto get_polynomials_views() const { - // As a practical measure, get the first instance's pointer view to deduce the vector type + // As a practical measure, get the first instance's view to deduce the vector type std::vector get_alls{ _data[0]->prover_polynomials.get_all() }; // complete the views, starting from the second item for (size_t i = 1; i < NUM; i++) { @@ -97,14 +99,10 @@ template struct VerifierInstances_ { std::shared_ptr const& operator[](size_t idx) const { return _data[idx]; } typename ArrayType::iterator begin() { return _data.begin(); }; typename ArrayType::iterator end() { return _data.end(); }; - VerifierInstances_(std::vector> vks) + + VerifierInstances_() { - ASSERT(vks.size() == NUM); - for (size_t idx = 0; idx < vks.size(); idx++) { - Instance inst; - inst.verification_key = std::move(vks[idx]); - _data[idx] = std::make_unique(inst); - } + std::generate(_data.begin(), _data.end(), []() { return std::make_unique(); }); }; }; } // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp index 3a62affd0ac..4e5a5d66123 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp @@ -1,5 +1,5 @@ #include "prover_instance.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp" #include "barretenberg/proof_system/composer/permutation_lib.hpp" #include "barretenberg/proof_system/library/grand_product_delta.hpp" @@ -353,6 +353,9 @@ template void ProverInstance_::initialize_prover_polynomi size_t idx = i + pub_inputs_offset; public_inputs.emplace_back(public_wires_source[idx]); } + + instance_size = proving_key->circuit_size; + log_instance_size = static_cast(numeric::get_msb(instance_size)); } template void ProverInstance_::compute_sorted_accumulator_polynomials(FF eta) @@ -453,7 +456,7 @@ void ProverInstance_::compute_logderivative_inverse(FF
beta, FF gamma) relation_parameters.gamma = gamma; // Compute permutation and lookup grand product polynomials - lookup_library::compute_logderivative_inverse( + logderivative_library::compute_logderivative_inverse( prover_polynomials, relation_parameters, proving_key->circuit_size); } diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp index 5f12dc0c1dd..ea09aa6fceb 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.hpp @@ -3,7 +3,6 @@ #include "barretenberg/flavor/goblin_ultra.hpp" #include "barretenberg/flavor/ultra.hpp" #include "barretenberg/proof_system/composer/composer_lib.hpp" -#include "barretenberg/protogalaxy/folding_result.hpp" #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/srs/factories/file_crs_factory.hpp" @@ -28,6 +27,7 @@ template class ProverInstance_ { using ProverPolynomials = typename Flavor::ProverPolynomials; using Polynomial = typename Flavor::Polynomial; using WitnessCommitments = typename Flavor::WitnessCommitments; + using CommitmentLabels = typename Flavor::CommitmentLabels; public: std::shared_ptr proving_key; @@ -35,6 +35,7 @@ template class ProverInstance_ { ProverPolynomials prover_polynomials; WitnessCommitments witness_commitments; + CommitmentLabels commitment_labels; std::array sorted_polynomials; @@ -50,6 +51,9 @@ template class ProverInstance_ { std::vector recursive_proof_public_input_indices; // non-empty for the accumulated instances FoldingParameters folding_parameters; + bool is_accumulator = false; + size_t instance_size; + size_t log_instance_size; ProverInstance_(Circuit& circuit) { @@ -58,12 +62,6 @@ template class ProverInstance_ { compute_witness(circuit); } - ProverInstance_(FoldingResult result) - : verification_key(std::move(result.verification_key)) - , prover_polynomials(result.folded_prover_polynomials) - , public_inputs(result.folded_public_inputs) - , folding_parameters(result.folding_parameters){}; - ProverInstance_() = default; ~ProverInstance_() = default; diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp index fb14bd32b8b..06fdc47f264 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/verifier_instance.hpp @@ -8,14 +8,20 @@ template class VerifierInstance_ { using FF = typename Flavor::FF; using VerificationKey = typename Flavor::VerificationKey; using FoldingParameters = typename Flavor::FoldingParameters; + using WitnessCommitments = typename Flavor::WitnessCommitments; + using CommitmentLabels = typename Flavor::CommitmentLabels; std::shared_ptr verification_key; std::vector public_inputs; - size_t pub_inputs_offset; + size_t pub_inputs_offset = 0; size_t public_input_size; size_t instance_size; + size_t log_instance_size; RelationParameters relation_parameters; FF alpha; + bool is_accumulator = false; FoldingParameters folding_parameters; + WitnessCommitments witness_commitments; + CommitmentLabels commitment_labels; }; } // namespace proof_system::honk \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp index b5c366be750..7ec78a73799 100644 --- 
a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp @@ -123,13 +123,13 @@ template class SumcheckProverRound { barretenberg::thread_utils::calculate_num_threads_pow2(round_size, min_iterations_per_thread); size_t iterations_per_thread = round_size / num_threads; // actual iterations per thread - // Constuct univariate accumulator containers; one per thread + // Construct univariate accumulator containers; one per thread std::vector thread_univariate_accumulators(num_threads); for (auto& accum : thread_univariate_accumulators) { Utils::zero_univariates(accum); } - // Constuct extended edge containers; one per thread + // Construct extended edge containers; one per thread std::vector extended_edges; extended_edges.resize(num_threads); diff --git a/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp b/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp index 018001ec101..e734829460c 100644 --- a/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp +++ b/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp @@ -4,8 +4,16 @@ #include "barretenberg/crypto/blake3s/blake3s.hpp" #include "barretenberg/crypto/pedersen_hash/pedersen.hpp" +// #define LOG_CHALLENGES +// #define LOG_INTERACTIONS + namespace proof_system::honk { +template +concept Loggable = (std::same_as || std::same_as || + std::same_as || + std::same_as || std::same_as); + // class TranscriptManifest; class TranscriptManifest { struct RoundData { @@ -268,6 +276,11 @@ class BaseTranscript { auto element_bytes = to_buffer(element); proof_data.insert(proof_data.end(), element_bytes.begin(), element_bytes.end()); +#ifdef LOG_INTERACTIONS + if constexpr (Loggable) { + info("sent: ", label, ": ", element); + } +#endif BaseTranscript::consume_prover_element_bytes(label, element_bytes); } @@ -289,6 +302,11 @@ class BaseTranscript { T element = from_buffer(element_bytes); +#ifdef LOG_INTERACTIONS + if constexpr (Loggable) { + info("received: ", label, ": ", element); + } +#endif return element; } @@ -320,7 +338,14 @@ class BaseTranscript { return verifier_transcript; }; - uint256_t get_challenge(const std::string& label) { return get_challenges(label)[0]; } + uint256_t get_challenge(const std::string& label) + { + uint256_t result = get_challenges(label)[0]; +#if defined LOG_CHALLENGES || defined LOG_INTERACTIONS + info("challenge: ", label, ": ", result); +#endif + return result; + } [[nodiscard]] TranscriptManifest get_manifest() const { return manifest; }; diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp index ed7c1274d5e..d186ebc6eb7 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp @@ -9,6 +9,9 @@ #include using namespace proof_system::honk; +using CircuitBuilder = flavor::GoblinTranslator::CircuitBuilder; +using Transcript = flavor::GoblinTranslator::Transcript; +using OpQueue = proof_system::ECCOpQueue; namespace test_goblin_translator_composer { @@ -25,15 +28,6 @@ std::vector add_variables(auto& circuit_constructor, std::vector(); @@ -68,14 +62,24 @@ TEST_F(GoblinTranslatorComposerTests, Basic) op_queue->add_accumulate(P1); op_queue->mul_accumulate(P2, z); } - Fq batching_challenge = Fq::random_element(); - Fq x = Fq::random_element(); - auto circuit_builder = 
proof_system::GoblinTranslatorCircuitBuilder(batching_challenge, x); - circuit_builder.feed_ecc_op_queue_into_circuit(op_queue); + + auto prover_transcript = std::make_shared(); + prover_transcript->send_to_verifier("init", Fq::random_element()); + prover_transcript->export_proof(); + Fq translation_batching_challenge = prover_transcript->get_challenge("Translation:batching_challenge"); + Fq translation_evaluation_challenge = Fq::random_element(); + auto circuit_builder = CircuitBuilder(translation_batching_challenge, translation_evaluation_challenge, op_queue); EXPECT_TRUE(circuit_builder.check_circuit()); auto composer = GoblinTranslatorComposer(); - prove_and_verify(circuit_builder, composer, /*expected_result=*/true); + auto prover = composer.create_prover(circuit_builder, prover_transcript); + auto proof = prover.construct_proof(); + + auto verifier_transcript = std::make_shared(prover_transcript->proof_data); + verifier_transcript->template receive_from_prover("init"); + auto verifier = composer.create_verifier(circuit_builder, verifier_transcript); + bool verified = verifier.verify_proof(proof); + EXPECT_TRUE(verified); } } // namespace test_goblin_translator_composer diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp index cd3e010b0d9..aeaa9a2719a 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp @@ -256,13 +256,12 @@ void GoblinTranslatorProver::execute_preamble_round() const auto SHIFT = uint256_t(1) << Flavor::NUM_LIMB_BITS; const auto SHIFTx2 = uint256_t(1) << (Flavor::NUM_LIMB_BITS * 2); const auto SHIFTx3 = uint256_t(1) << (Flavor::NUM_LIMB_BITS * 3); - const auto accumulated_result = typename Flavor::BF(uint256_t(key->accumulators_binary_limbs_0[1]) + - uint256_t(key->accumulators_binary_limbs_1[1]) * SHIFT + - uint256_t(key->accumulators_binary_limbs_2[1]) * SHIFTx2 + - uint256_t(key->accumulators_binary_limbs_3[1]) * SHIFTx3); + const auto accumulated_result = + BF(uint256_t(key->accumulators_binary_limbs_0[1]) + uint256_t(key->accumulators_binary_limbs_1[1]) * SHIFT + + uint256_t(key->accumulators_binary_limbs_2[1]) * SHIFTx2 + + uint256_t(key->accumulators_binary_limbs_3[1]) * SHIFTx3); transcript->send_to_verifier("circuit_size", circuit_size); transcript->send_to_verifier("evaluation_input_x", key->evaluation_input_x); - transcript->send_to_verifier("batching_challenge_v", key->batching_challenge_v); transcript->send_to_verifier("accumulated_result", accumulated_result); } @@ -366,7 +365,7 @@ void GoblinTranslatorProver::execute_zeromorph_rounds() plonk::proof& GoblinTranslatorProver::export_proof() { - proof.proof_data = transcript->proof_data; + proof.proof_data = transcript->export_proof(); return proof; } diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp index 3a2db24407d..99bf48c490d 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp @@ -12,6 +12,7 @@ class GoblinTranslatorProver { using Flavor = honk::flavor::GoblinTranslator; using FF = typename Flavor::FF; + using BF = typename Flavor::BF; using Commitment = typename Flavor::Commitment; using CommitmentKey = typename 
Flavor::CommitmentKey; using ProvingKey = typename Flavor::ProvingKey; diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp index 089c2c37451..44e736b71d5 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp @@ -68,9 +68,8 @@ void GoblinTranslatorVerifier::put_translation_data_in_relation_parameters(const */ bool GoblinTranslatorVerifier::verify_proof(const plonk::proof& proof) { - transcript = std::make_shared(proof.proof_data); - - transcript = std::make_shared(proof.proof_data); + batching_challenge_v = transcript->get_challenge("Translation:batching_challenge"); + transcript->load_proof(proof.proof_data); Flavor::VerifierCommitments commitments{ key }; Flavor::CommitmentLabels commitment_labels; @@ -78,7 +77,6 @@ bool GoblinTranslatorVerifier::verify_proof(const plonk::proof& proof) // TODO(Adrian): Change the initialization of the transcript to take the VK hash? const auto circuit_size = transcript->template receive_from_prover("circuit_size"); evaluation_input_x = transcript->template receive_from_prover("evaluation_input_x"); - batching_challenge_v = transcript->template receive_from_prover("batching_challenge_v"); const BF accumulated_result = transcript->template receive_from_prover("accumulated_result"); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp index a09a822085b..52bc8aa9a03 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/goblin_ultra_composer.test.cpp @@ -77,7 +77,7 @@ class GoblinUltraHonkComposerTests : public ::testing::Test { bool construct_and_verify_merge_proof(auto& composer, auto& op_queue) { auto merge_prover = composer.create_merge_prover(op_queue); - auto merge_verifier = composer.create_merge_verifier(10); + auto merge_verifier = composer.create_merge_verifier(); auto merge_proof = merge_prover.construct_proof(); bool verified = merge_verifier.verify_proof(merge_proof); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp index a27b2091f3a..671634a3073 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp @@ -94,7 +94,7 @@ template plonk::proof& MergeProver_::construct_proof() FF alpha = transcript->get_challenge("alpha"); - // Constuct batched polynomial to opened via KZG + // Construct batched polynomial to be opened via KZG auto batched_polynomial = Polynomial(N); auto batched_eval = FF(0); auto alpha_pow = FF(1); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp index 1d913b342fb..ebf18518afa 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp @@ -3,10 +3,9 @@ namespace proof_system::honk { template -MergeVerifier_::MergeVerifier_(std::unique_ptr verification_key, - const std::shared_ptr& transcript) - : transcript(transcript) - , pcs_verification_key(std::move(verification_key)){}; +MergeVerifier_::MergeVerifier_() + : transcript(std::make_shared()) + ,
pcs_verification_key(std::make_unique(0, barretenberg::srs::get_crs_factory())){}; /** * @brief Verify proper construction of the aggregate Goblin ECC op queue polynomials T_i^(j), j = 1,2,3,4. @@ -62,7 +61,7 @@ template bool MergeVerifier_::verify_proof(const plonk FF alpha = transcript->get_challenge("alpha"); - // Constuct batched commitment and evaluation from constituents + // Construct batched commitment and evaluation from constituents auto batched_commitment = opening_claims[0].commitment; auto batched_eval = opening_claims[0].opening_pair.evaluation; auto alpha_pow = alpha; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.hpp index d6880476b79..da094df6b7e 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.hpp @@ -5,6 +5,7 @@ #include "barretenberg/flavor/ultra.hpp" #include "barretenberg/plonk/proof_system/types/proof.hpp" #include "barretenberg/proof_system/op_queue/ecc_op_queue.hpp" +#include "barretenberg/srs/global_crs.hpp" #include "barretenberg/transcript/transcript.hpp" namespace proof_system::honk { @@ -31,8 +32,7 @@ template class MergeVerifier_ { std::shared_ptr op_queue; std::shared_ptr pcs_verification_key; - explicit MergeVerifier_(std::unique_ptr verification_key, - const std::shared_ptr& transcript); + explicit MergeVerifier_(); bool verify_proof(const plonk::proof& proof); }; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp index fac96d16cc6..caf1285e0b8 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/protogalaxy.test.cpp @@ -6,14 +6,18 @@ using namespace barretenberg; using namespace proof_system::honk; using Flavor = flavor::Ultra; +using VerificationKey = Flavor::VerificationKey; using Instance = ProverInstance_; using Instances = ProverInstances_; using ProtoGalaxyProver = ProtoGalaxyProver_; using FF = Flavor::FF; +using Affine = Flavor::Commitment; +using Projective = Flavor::GroupElement; using Builder = Flavor::CircuitBuilder; using Polynomial = typename Flavor::Polynomial; using ProverPolynomials = Flavor::ProverPolynomials; using RelationParameters = proof_system::RelationParameters; +using WitnessCommitments = typename Flavor::WitnessCommitments; const size_t NUM_POLYNOMIALS = Flavor::NUM_ALL_ENTITIES; namespace protogalaxy_tests { @@ -81,6 +85,26 @@ ProverPolynomials construct_ultra_full_polynomials(auto& input_polynomials) return full_polynomials; } +std::shared_ptr construct_ultra_verification_key(size_t instance_size, size_t num_public_inputs) +{ + auto verification_key = std::make_shared(instance_size, num_public_inputs); + auto vk_view = verification_key->get_all(); + for (auto& view : vk_view) { + view = Affine(Projective::random_element()); + } + return verification_key; +} + +WitnessCommitments construct_witness_commitments() +{ + WitnessCommitments wc; + auto w_view = wc.get_all(); + for (auto& view : w_view) { + view = Affine(Projective::random_element()); + } + return wc; +} + class ProtoGalaxyTests : public ::testing::Test { public: static void SetUpTestSuite() { barretenberg::srs::init_crs_factory("../srs_db/ignition"); } @@ -159,11 +183,9 @@ TEST_F(ProtoGalaxyTests, PerturbatorPolynomial) target_sum += full_honk_evals[i] * pow_beta[i]; } - auto accumulator = std::make_shared( - 
FoldingResult{ .folded_prover_polynomials = full_polynomials, - .folded_public_inputs = std::vector{}, - .verification_key = std::make_shared(), - .folding_parameters = { betas, target_sum } }); + auto accumulator = std::make_shared(); + accumulator->prover_polynomials = full_polynomials; + accumulator->folding_parameters = { betas, target_sum }; accumulator->relation_parameters = relation_parameters; accumulator->alpha = alpha; @@ -225,7 +247,7 @@ TEST_F(ProtoGalaxyTests, FoldChallenges) instance2->relation_parameters.eta = 3; Instances instances{ { instance1, instance2 } }; - ProtoGalaxyProver::fold_relation_parameters(instances); + ProtoGalaxyProver::combine_relation_parameters(instances); Univariate expected_eta{ { 1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 21, 23 } }; EXPECT_EQ(instances.relation_parameters.eta, expected_eta); @@ -246,10 +268,67 @@ TEST_F(ProtoGalaxyTests, FoldAlpha) instance2->alpha = 4; Instances instances{ { instance1, instance2 } }; - ProtoGalaxyProver::fold_alpha(instances); + ProtoGalaxyProver::combine_alpha(instances); Univariate expected_alpha{ { 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26 } }; EXPECT_EQ(instances.alpha, expected_alpha); } +// TODO(https://github.com/AztecProtocol/barretenberg/issues/807): Have proper full folding testing (both failing and +// passing) and move creating a test accumulator in a separate function. +TEST_F(ProtoGalaxyTests, ComputeNewAccumulator) +{ + const size_t log_instance_size(4); + const size_t instance_size(1 << log_instance_size); + + std::array, NUM_POLYNOMIALS> random_polynomials; + for (auto& poly : random_polynomials) { + poly = get_random_polynomial(instance_size); + } + auto full_polynomials = construct_ultra_full_polynomials(random_polynomials); + auto relation_parameters = proof_system::RelationParameters::get_random(); + auto alpha = FF::random_element(); + + auto full_honk_evals = + ProtoGalaxyProver::compute_full_honk_evaluations(full_polynomials, alpha, relation_parameters); + std::vector betas(log_instance_size); + for (size_t idx = 0; idx < log_instance_size; idx++) { + betas[idx] = FF::random_element(); + } + + // Construct pow(\vec{betas}) as in the paper + auto pow_beta = ProtoGalaxyProver::compute_pow_polynomial_at_values(betas, instance_size); + + // Compute the corresponding target sum and create a dummy accumulator + auto target_sum = FF(0); + for (size_t i = 0; i < instance_size; i++) { + target_sum += full_honk_evals[i] * pow_beta[i]; + } + + auto accumulator = std::make_shared(); + accumulator->witness_commitments = construct_witness_commitments(); + accumulator->instance_size = instance_size; + accumulator->log_instance_size = log_instance_size; + accumulator->prover_polynomials = full_polynomials; + accumulator->folding_parameters = { betas, target_sum }; + accumulator->relation_parameters = relation_parameters; + accumulator->alpha = alpha; + accumulator->is_accumulator = true; + accumulator->public_inputs = std::vector{ FF::random_element() }; + accumulator->verification_key = construct_ultra_verification_key(instance_size, 1); + + auto builder = typename Flavor::CircuitBuilder(); + auto composer = UltraComposer(); + builder.add_public_variable(FF(1)); + + auto instance = composer.create_instance(builder); + auto instances = std::vector>{ accumulator, instance }; + auto folding_prover = composer.create_folding_prover(instances, composer.commitment_key); + auto folding_verifier = composer.create_folding_verifier(); + + auto proof = folding_prover.fold_instances(); + auto res = 
folding_verifier.verify_folding_proof(proof.folding_data); + EXPECT_EQ(res, true); +} + } // namespace protogalaxy_tests \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp index cdfb202db39..7daaab2e1d3 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.hpp @@ -68,6 +68,8 @@ template class UltraComposer_ { * * @param op_queue * @return MergeProver_ + * TODO(https://github.com/AztecProtocol/barretenberg/issues/804): Goblin should be responsible for constructing + * merge prover/verifier. */ MergeProver_ create_merge_prover( const std::shared_ptr& op_queue, @@ -84,31 +86,21 @@ template class UltraComposer_ { /** * @brief Create Verifier for Goblin ECC op queue merge protocol * - * @param size Size of commitment key required to commit to shifted op queue contribution t_i * @return MergeVerifier_ */ - MergeVerifier_ create_merge_verifier( - size_t srs_size, const std::shared_ptr& transcript = std::make_shared()) - { - auto pcs_verification_key = std::make_unique(srs_size, crs_factory_); - return MergeVerifier_(std::move(pcs_verification_key), transcript); - } + MergeVerifier_ create_merge_verifier() { return MergeVerifier_(); } - ProtoGalaxyProver_ create_folding_prover(const std::vector>& instances) + ProtoGalaxyProver_ create_folding_prover(const std::vector>& instances, + const std::shared_ptr& commitment_key) { - ProverInstances insts(instances); - ProtoGalaxyProver_ output_state(insts); + ProtoGalaxyProver_ output_state(instances, commitment_key); return output_state; }; - ProtoGalaxyVerifier_ create_folding_verifier( - const std::vector>& instances) + ProtoGalaxyVerifier_ create_folding_verifier() { - std::vector> vks; - for (const auto& inst : instances) { - vks.emplace_back(inst->verification_key); - } - VerifierInstances insts(vks); + + auto insts = VerifierInstances(); ProtoGalaxyVerifier_ output_state(insts); return output_state; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp index df70e5ac12c..2775b586c7e 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.cpp @@ -15,6 +15,19 @@ UltraVerifier_::UltraVerifier_(const std::shared_ptr& transc , transcript(transcript) {} +/** + * @brief Construct an UltraVerifier directly from a verification key + * + * @tparam Flavor + * @param verifier_key + */ +template +UltraVerifier_::UltraVerifier_(const std::shared_ptr& verifier_key) + : key(verifier_key) + , pcs_verification_key(std::make_unique(0, barretenberg::srs::get_crs_factory())) + , transcript(std::make_shared()) +{} + template UltraVerifier_::UltraVerifier_(UltraVerifier_&& other) : key(std::move(other.key)) diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.hpp index 6a023071f02..8197e46a941 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_verifier.hpp @@ -2,6 +2,7 @@ #include "barretenberg/flavor/goblin_ultra.hpp" #include "barretenberg/flavor/ultra.hpp" #include "barretenberg/plonk/proof_system/types/proof.hpp" +#include "barretenberg/srs/global_crs.hpp" #include "barretenberg/sumcheck/sumcheck.hpp" namespace 
proof_system::honk { @@ -15,6 +16,7 @@ template class UltraVerifier_ { public: explicit UltraVerifier_(const std::shared_ptr& transcript, const std::shared_ptr& verifier_key = nullptr); + explicit UltraVerifier_(const std::shared_ptr& verifier_key); UltraVerifier_(UltraVerifier_&& other); UltraVerifier_& operator=(const UltraVerifier_& other) = delete; diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp index 913dc121988..b7daa36ddb3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp @@ -3,7 +3,7 @@ #include "AvmMini_prover.hpp" #include "barretenberg/commitment_schemes/claim.hpp" #include "barretenberg/commitment_schemes/commitment_key.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/honk/proof_system/power_polynomial.hpp" #include "barretenberg/polynomials/polynomial.hpp" diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.cpp deleted file mode 100644 index 7a78c264e75..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.cpp +++ /dev/null @@ -1,85 +0,0 @@ - - -#include "./Fib_composer.hpp" -#include "barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp" -#include "barretenberg/proof_system/composer/composer_lib.hpp" -#include "barretenberg/proof_system/composer/permutation_lib.hpp" -#include "barretenberg/vm/generated/Fib_verifier.hpp" - -namespace proof_system::honk { - -using Flavor = honk::flavor::FibFlavor; -void FibComposer::compute_witness(CircuitConstructor& circuit) -{ - if (computed_witness) { - return; - } - - auto polynomials = circuit.compute_polynomials(); - - proving_key->Fibonacci_LAST = polynomials.Fibonacci_LAST; - proving_key->Fibonacci_FIRST = polynomials.Fibonacci_FIRST; - proving_key->Fibonacci_x = polynomials.Fibonacci_x; - proving_key->Fibonacci_y = polynomials.Fibonacci_y; - - computed_witness = true; -} - -FibProver FibComposer::create_prover(CircuitConstructor& circuit_constructor) -{ - compute_proving_key(circuit_constructor); - compute_witness(circuit_constructor); - compute_commitment_key(circuit_constructor.get_circuit_subgroup_size()); - - FibProver output_state(proving_key, commitment_key); - - return output_state; -} - -FibVerifier FibComposer::create_verifier(CircuitConstructor& circuit_constructor) -{ - auto verification_key = compute_verification_key(circuit_constructor); - - FibVerifier output_state(verification_key); - - auto pcs_verification_key = std::make_unique(verification_key->circuit_size, crs_factory_); - - output_state.pcs_verification_key = std::move(pcs_verification_key); - - return output_state; -} - -std::shared_ptr FibComposer::compute_proving_key(CircuitConstructor& circuit_constructor) -{ - if (proving_key) { - return proving_key; - } - - // Initialize proving_key - { - const size_t subgroup_size = circuit_constructor.get_circuit_subgroup_size(); - proving_key = std::make_shared(subgroup_size, 0); - } - - proving_key->contains_recursive_proof = false; - - return proving_key; -} - -std::shared_ptr FibComposer::compute_verification_key(CircuitConstructor& circuit_constructor) -{ - if (verification_key) { - return verification_key; - } - - 
if (!proving_key) { - compute_proving_key(circuit_constructor); - } - - verification_key = - std::make_shared(proving_key->circuit_size, proving_key->num_public_inputs); - - return verification_key; -} - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.hpp deleted file mode 100644 index 99c71c1913f..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.hpp +++ /dev/null @@ -1,69 +0,0 @@ - - -#pragma once - -#include "barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp" -#include "barretenberg/proof_system/composer/composer_lib.hpp" -#include "barretenberg/srs/global_crs.hpp" -#include "barretenberg/vm/generated/Fib_prover.hpp" -#include "barretenberg/vm/generated/Fib_verifier.hpp" - -namespace proof_system::honk { -class FibComposer { - public: - using Flavor = honk::flavor::FibFlavor; - using CircuitConstructor = FibCircuitBuilder; - using ProvingKey = Flavor::ProvingKey; - using VerificationKey = Flavor::VerificationKey; - using PCS = Flavor::PCS; - using CommitmentKey = Flavor::CommitmentKey; - using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; - - // TODO: which of these will we really need - static constexpr std::string_view NAME_STRING = "Fib"; - static constexpr size_t NUM_RESERVED_GATES = 0; - static constexpr size_t NUM_WIRES = Flavor::NUM_WIRES; - - std::shared_ptr proving_key; - std::shared_ptr verification_key; - - // The crs_factory holds the path to the srs and exposes methods to extract the srs elements - std::shared_ptr> crs_factory_; - - // The commitment key is passed to the prover but also used herein to compute the verfication key commitments - std::shared_ptr commitment_key; - - std::vector recursive_proof_public_input_indices; - bool contains_recursive_proof = false; - bool computed_witness = false; - - FibComposer() { crs_factory_ = barretenberg::srs::get_crs_factory(); } - - FibComposer(std::shared_ptr p_key, std::shared_ptr v_key) - : proving_key(std::move(p_key)) - , verification_key(std::move(v_key)) - {} - - FibComposer(FibComposer&& other) noexcept = default; - FibComposer(FibComposer const& other) noexcept = default; - FibComposer& operator=(FibComposer&& other) noexcept = default; - FibComposer& operator=(FibComposer const& other) noexcept = default; - ~FibComposer() = default; - - std::shared_ptr compute_proving_key(CircuitConstructor& circuit_constructor); - std::shared_ptr compute_verification_key(CircuitConstructor& circuit_constructor); - - void compute_witness(CircuitConstructor& circuit_constructor); - - FibProver create_prover(CircuitConstructor& circuit_constructor); - FibVerifier create_verifier(CircuitConstructor& circuit_constructor); - - void add_table_column_selector_poly_to_proving_key(barretenberg::polynomial& small, const std::string& tag); - - void compute_commitment_key(size_t circuit_size) - { - commitment_key = std::make_shared(circuit_size, crs_factory_); - }; -}; - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.test.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.test.cpp deleted file mode 100644 index 36650f8ce61..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.test.cpp +++ /dev/null @@ -1,51 +0,0 @@ -#include "barretenberg/vm/generated/Fib_composer.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include 
"barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp" -#include "barretenberg/proof_system/plookup_tables/types.hpp" -#include "barretenberg/sumcheck/sumcheck_round.hpp" -#include "barretenberg/vm/generated/Fib_prover.hpp" -#include "barretenberg/vm/generated/Fib_verifier.hpp" -#include -#include -#include -#include -#include - -using namespace proof_system::honk; - -namespace example_relation_honk_composer { - -class FibTests : public ::testing::Test { - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. - void SetUp() override { barretenberg::srs::init_crs_factory("../srs_db/ignition"); }; -}; - -namespace { -auto& engine = numeric::random::get_debug_engine(); -} - -TEST_F(FibTests, powdre2e) -{ - barretenberg::srs::init_crs_factory("../srs_db/ignition"); - - auto circuit_builder = proof_system::FibCircuitBuilder(); - - auto rows = proof_system::FibTraceBuilder::build_trace(); - circuit_builder.set_trace(std::move(rows)); - - auto composer = FibComposer(); - - bool circuit_gud = circuit_builder.check_circuit(); - ASSERT_TRUE(circuit_gud); - - auto prover = composer.create_prover(circuit_builder); - auto proof = prover.construct_proof(); - - auto verifier = composer.create_verifier(circuit_builder); - bool verified = verifier.verify_proof(proof); - ASSERT_TRUE(verified) << proof; -} - -} // namespace example_relation_honk_composer \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.cpp deleted file mode 100644 index b8cd3fe8907..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.cpp +++ /dev/null @@ -1,136 +0,0 @@ - - -#include "Fib_prover.hpp" -#include "barretenberg/commitment_schemes/claim.hpp" -#include "barretenberg/commitment_schemes/commitment_key.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" -#include "barretenberg/honk/proof_system/permutation_library.hpp" -#include "barretenberg/honk/proof_system/power_polynomial.hpp" -#include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/proof_system/library/grand_product_library.hpp" -#include "barretenberg/relations/lookup_relation.hpp" -#include "barretenberg/relations/permutation_relation.hpp" -#include "barretenberg/sumcheck/sumcheck.hpp" - -namespace proof_system::honk { - -using Flavor = honk::flavor::FibFlavor; - -/** - * Create FibProver from proving key, witness and manifest. - * - * @param input_key Proving key. - * @param input_manifest Input manifest - * - * @tparam settings Settings class. - * */ -FibProver::FibProver(std::shared_ptr input_key, std::shared_ptr commitment_key) - : key(input_key) - , commitment_key(commitment_key) -{ - // TODO: take every polynomial and assign it to the key!! 
- - prover_polynomials.Fibonacci_LAST = key->Fibonacci_LAST; - prover_polynomials.Fibonacci_FIRST = key->Fibonacci_FIRST; - prover_polynomials.Fibonacci_x = key->Fibonacci_x; - prover_polynomials.Fibonacci_y = key->Fibonacci_y; - - prover_polynomials.Fibonacci_x = key->Fibonacci_x; - prover_polynomials.Fibonacci_x_shift = key->Fibonacci_x.shifted(); - - prover_polynomials.Fibonacci_y = key->Fibonacci_y; - prover_polynomials.Fibonacci_y_shift = key->Fibonacci_y.shifted(); - - // prover_polynomials.lookup_inverses = key->lookup_inverses; - // key->z_perm = Polynomial(key->circuit_size); - // prover_polynomials.z_perm = key->z_perm; -} - -/** - * @brief Add circuit size, public input size, and public inputs to transcript - * - */ -void FibProver::execute_preamble_round() -{ - const auto circuit_size = static_cast(key->circuit_size); - - transcript->send_to_verifier("circuit_size", circuit_size); -} - -/** - * @brief Compute commitments to the first three wires - * - */ -void FibProver::execute_wire_commitments_round() -{ - auto wire_polys = key->get_wires(); - auto labels = commitment_labels.get_wires(); - for (size_t idx = 0; idx < wire_polys.size(); ++idx) { - transcript->send_to_verifier(labels[idx], commitment_key->commit(wire_polys[idx])); - } -} - -/** - * @brief Run Sumcheck resulting in u = (u_1,...,u_d) challenges and all evaluations at u being calculated. - * - */ -void FibProver::execute_relation_check_rounds() -{ - using Sumcheck = sumcheck::SumcheckProver; - - auto sumcheck = Sumcheck(key->circuit_size, transcript); - auto alpha = transcript->get_challenge("alpha"); - - sumcheck_output = sumcheck.prove(prover_polynomials, relation_parameters, alpha); -} - -/** - * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck - * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. - * - * */ -void FibProver::execute_zeromorph_rounds() -{ - ZeroMorph::prove(prover_polynomials.get_unshifted(), - prover_polynomials.get_to_be_shifted(), - sumcheck_output.claimed_evaluations.get_unshifted(), - sumcheck_output.claimed_evaluations.get_shifted(), - sumcheck_output.challenge, - commitment_key, - transcript); -} - -plonk::proof& FibProver::export_proof() -{ - proof.proof_data = transcript->proof_data; - return proof; -} - -plonk::proof& FibProver::construct_proof() -{ - // Add circuit size public input size and public inputs to transcript-> - execute_preamble_round(); - - // Compute wire commitments - execute_wire_commitments_round(); - - // TODO: not implemented for codegen just yet - // Compute sorted list accumulator and commitment - // execute_log_derivative_commitments_round(); - - // Fiat-Shamir: bbeta & gamma - // Compute grand product(s) and commitments. - // execute_grand_product_computation_round(); - - // Fiat-Shamir: alpha - // Run sumcheck subprotocol. 
- execute_relation_check_rounds(); - - // Fiat-Shamir: rho, y, x, z - // Execute Zeromorph multilinear PCS - execute_zeromorph_rounds(); - - return export_proof(); -} - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.hpp deleted file mode 100644 index 7b9e3cc6862..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.hpp +++ /dev/null @@ -1,62 +0,0 @@ - - -#pragma once -#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/plonk/proof_system/types/proof.hpp" -#include "barretenberg/relations/relation_parameters.hpp" -#include "barretenberg/sumcheck/sumcheck_output.hpp" -#include "barretenberg/transcript/transcript.hpp" - -namespace proof_system::honk { - -class FibProver { - - using Flavor = honk::flavor::FibFlavor; - using FF = Flavor::FF; - using PCS = Flavor::PCS; - using PCSCommitmentKey = Flavor::CommitmentKey; - using ProvingKey = Flavor::ProvingKey; - using Polynomial = Flavor::Polynomial; - using ProverPolynomials = Flavor::ProverPolynomials; - using CommitmentLabels = Flavor::CommitmentLabels; - using Curve = Flavor::Curve; - using Transcript = Flavor::Transcript; - - public: - explicit FibProver(std::shared_ptr input_key, std::shared_ptr commitment_key); - - void execute_preamble_round(); - void execute_wire_commitments_round(); - void execute_relation_check_rounds(); - void execute_zeromorph_rounds(); - - plonk::proof& export_proof(); - plonk::proof& construct_proof(); - - std::shared_ptr transcript = std::make_shared(); - - std::vector public_inputs; - - proof_system::RelationParameters relation_parameters; - - std::shared_ptr key; - - // Container for spans of all polynomials required by the prover (i.e. all multivariates evaluated by Sumcheck). - ProverPolynomials prover_polynomials; - - CommitmentLabels commitment_labels; - - Polynomial quotient_W; - - sumcheck::SumcheckOutput sumcheck_output; - - std::shared_ptr commitment_key; - - using ZeroMorph = pcs::zeromorph::ZeroMorphProver_; - - private: - plonk::proof proof; -}; - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.cpp deleted file mode 100644 index a47e2c0fdf0..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.cpp +++ /dev/null @@ -1,89 +0,0 @@ - - -#include "./Fib_verifier.hpp" -#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" -#include "barretenberg/honk/proof_system/power_polynomial.hpp" -#include "barretenberg/numeric/bitop/get_msb.hpp" -#include "barretenberg/transcript/transcript.hpp" - -using namespace barretenberg; -using namespace proof_system::honk::sumcheck; - -namespace proof_system::honk { -FibVerifier::FibVerifier(std::shared_ptr verifier_key) - : key(verifier_key) -{} - -FibVerifier::FibVerifier(FibVerifier&& other) noexcept - : key(std::move(other.key)) - , pcs_verification_key(std::move(other.pcs_verification_key)) -{} - -FibVerifier& FibVerifier::operator=(FibVerifier&& other) noexcept -{ - key = other.key; - pcs_verification_key = (std::move(other.pcs_verification_key)); - commitments.clear(); - return *this; -} - -/** - * @brief This function verifies an Fib Honk proof for given program settings. 
- * - */ -bool FibVerifier::verify_proof(const plonk::proof& proof) -{ - using Flavor = honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Commitment = Flavor::Commitment; - // using Curve = Flavor::Curve; - // using ZeroMorph = pcs::zeromorph::ZeroMorphVerifier_; - using VerifierCommitments = Flavor::VerifierCommitments; - using CommitmentLabels = Flavor::CommitmentLabels; - using Transcript = Flavor::Transcript; - - RelationParameters relation_parameters; - - transcript = std::make_shared(proof.proof_data); - - VerifierCommitments commitments{ key }; - CommitmentLabels commitment_labels; - - const auto circuit_size = transcript->template receive_from_prover("circuit_size"); - - if (circuit_size != key->circuit_size) { - return false; - } - - // Get commitments to VM wires - commitments.Fibonacci_x = transcript->template receive_from_prover(commitment_labels.Fibonacci_x); - commitments.Fibonacci_y = transcript->template receive_from_prover(commitment_labels.Fibonacci_y); - - // Execute Sumcheck Verifier - auto sumcheck = SumcheckVerifier(circuit_size); - - auto alpha = transcript->get_challenge("alpha"); - auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = - sumcheck.verify(relation_parameters, alpha, transcript); - - // If Sumcheck did not verify, return false - if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { - return false; - } - - // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the - // unrolled protocol. - // NOTE: temporarily disabled - facing integration issues - // auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), - // commitments.get_to_be_shifted(), - // claimed_evaluations.get_unshifted(), - // claimed_evaluations.get_shifted(), - // multivariate_challenge, - // transcript); - - // auto verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); - // return sumcheck_verified.value() && verified; - return sumcheck_verified.value(); -} - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.hpp deleted file mode 100644 index 303cb4fca70..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.hpp +++ /dev/null @@ -1,33 +0,0 @@ - - -#pragma once -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/plonk/proof_system/types/proof.hpp" -#include "barretenberg/sumcheck/sumcheck.hpp" - -namespace proof_system::honk { -class FibVerifier { - using Flavor = honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Commitment = Flavor::Commitment; - using VerificationKey = Flavor::VerificationKey; - using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; - using Transcript = Flavor::Transcript; - - public: - explicit FibVerifier(std::shared_ptr verifier_key = nullptr); - FibVerifier(FibVerifier&& other) noexcept; - FibVerifier(const FibVerifier& other) = delete; - - FibVerifier& operator=(const FibVerifier& other) = delete; - FibVerifier& operator=(FibVerifier&& other) noexcept; - - bool verify_proof(const plonk::proof& proof); - - std::shared_ptr key; - std::map commitments; - std::shared_ptr pcs_verification_key; - std::shared_ptr transcript; -}; - -} // namespace proof_system::honk diff --git a/barretenberg/scripts/bindgen.sh b/barretenberg/scripts/bindgen.sh index 1a2034a213a..e3080a84824 100755 --- a/barretenberg/scripts/bindgen.sh +++ 
b/barretenberg/scripts/bindgen.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu #find ./cpp/src -type f -name "c_bind*.hpp" | ./scripts/decls_json.py > exports.json diff --git a/barretenberg/sol/scripts/init.sh b/barretenberg/sol/scripts/init.sh index 147adc9ae21..70868e37808 100755 --- a/barretenberg/sol/scripts/init.sh +++ b/barretenberg/sol/scripts/init.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash PLONK_FLAVOUR="ultra" diff --git a/barretenberg/sol/scripts/run_fuzzer.sh b/barretenberg/sol/scripts/run_fuzzer.sh index e6e235a85e5..2c76ad1de43 100755 --- a/barretenberg/sol/scripts/run_fuzzer.sh +++ b/barretenberg/sol/scripts/run_fuzzer.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash PLONK_FLAVOUR=${1:-"ultra"} CIRCUIT_FLAVOUR=${2:-"blake"} diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index 1b0d688a21b..fa0bef62e34 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ -1,5 +1,48 @@ # Changelog +## [0.16.7](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.6...barretenberg.js-v0.16.7) (2023-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.6](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.5...barretenberg.js-v0.16.6) (2023-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.5](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.4...barretenberg.js-v0.16.5) (2023-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.4](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.3...barretenberg.js-v0.16.4) (2023-12-05) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.3](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.2...barretenberg.js-v0.16.3) (2023-12-05) + + +### Miscellaneous + +* CLI's startup time was pushing almost 2s. This gets the basic 'help' down to 0.16. ([#3529](https://github.com/AztecProtocol/aztec-packages/issues/3529)) ([396df13](https://github.com/AztecProtocol/aztec-packages/commit/396df13389cdcb8b8b0d5a92a4b3d1c2bffcb7a7)) + +## [0.16.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.1...barretenberg.js-v0.16.2) (2023-12-05) + + +### Miscellaneous + +* Optimise bb.js package size and sandox/cli dockerfiles to unbloat final containers. ([#3462](https://github.com/AztecProtocol/aztec-packages/issues/3462)) ([cb3db5d](https://github.com/AztecProtocol/aztec-packages/commit/cb3db5d0f1f8912f1a97258e5043eb0f69eff551)) +* Pin node version in docker base images and bump nvmrc ([#3537](https://github.com/AztecProtocol/aztec-packages/issues/3537)) ([5d3895a](https://github.com/AztecProtocol/aztec-packages/commit/5d3895aefb7812eb6bd8017baf43533959ad69b4)) + ## [0.16.1](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.0...barretenberg.js-v0.16.1) (2023-11-28) diff --git a/barretenberg/ts/Dockerfile b/barretenberg/ts/Dockerfile index 1edeb3c4377..d28b5661b2d 100644 --- a/barretenberg/ts/Dockerfile +++ b/barretenberg/ts/Dockerfile @@ -1,11 +1,11 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-wasm-linux-clang -FROM node:18-alpine +FROM node:18.19.0-alpine COPY --from=0 /usr/src/barretenberg /usr/src/barretenberg # Create a standalone container that can run bb.js (and tests). 
# We'll perform the build in a new, different directory, so the original directory can become the "published" package. -WORKDIR /usr/src/barretenberg/ts +WORKDIR /usr/src/barretenberg/ts-build # Leverage layer caching. Only re-install packages if these files change. COPY .yarn .yarn COPY package.json package.json @@ -17,4 +17,4 @@ RUN yarn formatting && SKIP_CPP_BUILD=1 yarn build CMD ["yarn", "test"] # We want to create a pure package, as would be published to npm, for consuming projects. -RUN yarn pack && tar zxf package.tgz && rm package.tgz \ No newline at end of file +RUN yarn pack && tar zxf package.tgz && rm package.tgz && mv package ../ts \ No newline at end of file diff --git a/barretenberg/ts/bootstrap.sh b/barretenberg/ts/bootstrap.sh index 8b2d5b76c66..2f0fa19bb81 100755 --- a/barretenberg/ts/bootstrap.sh +++ b/barretenberg/ts/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index 64b06f27e09..79ba376f8ae 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/bb.js", - "version": "0.16.1", + "version": "0.16.7", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", @@ -20,14 +20,13 @@ "README.md" ], "scripts": { - "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs package.tgz package", - "build": "yarn clean && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser && yarn build:package", + "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs", + "build": "yarn clean && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser", "build:wasm": "./scripts/build_wasm.sh", "build:esm": "tsc -b && chmod +x ./dest/node/main.js", "build:cjs": "tsc -b tsconfig.cjs.json && ./scripts/cjs_postprocess.sh", "build:browser": "webpack", "build:bindings": "cd .. 
&& ./scripts/bindgen.sh", - "build:package": "yarn pack && tar zxf package.tgz && rm -f package.tgz", "formatting": "prettier --check ./src && eslint --max-warnings 0 ./src", "formatting:fix": "prettier -w ./src", "test": "NODE_OPTIONS='--loader ts-node/esm' NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --no-cache --passWithNoTests", diff --git a/barretenberg/ts/scripts/cjs_postprocess.sh b/barretenberg/ts/scripts/cjs_postprocess.sh index ccfcfc2d8a2..8a805bcdd44 100755 --- a/barretenberg/ts/scripts/cjs_postprocess.sh +++ b/barretenberg/ts/scripts/cjs_postprocess.sh @@ -11,4 +11,6 @@ DIR="./dest/node-cjs" for FILE in $(find "$DIR" -name "*.js"); do # Use sed to replace 'import.meta.url' with '""' sed -i.bak 's/import\.meta\.url/""/g' "$FILE" && rm "$FILE.bak" + # Use sed to remove any lines postfixed // POSTPROCESS ESM ONLY + sed -i.bak '/\/\/ POSTPROCESS ESM ONLY$/d' "$FILE" && rm "$FILE.bak" done \ No newline at end of file diff --git a/barretenberg/ts/scripts/run_tests b/barretenberg/ts/scripts/run_tests index ed93c9a8091..0f0d2895cec 100755 --- a/barretenberg/ts/scripts/run_tests +++ b/barretenberg/ts/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -xe $(aws ecr get-login --region us-east-2 --no-include-email) 2> /dev/null diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 40b2ef0261e..6b00a9b752a 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -41,7 +41,8 @@ export class Barretenberg extends BarretenbergApi { } } -let barretenbergSyncSingleton: Promise; +let barretenbergSyncSingleton: BarretenbergSync; +let barretenbergSyncSingletonPromise: Promise; export class BarretenbergSync extends BarretenbergApiSync { private constructor(wasm: BarretenbergWasmMain) { @@ -55,9 +56,16 @@ export class BarretenbergSync extends BarretenbergApiSync { return new BarretenbergSync(wasm); } + static initSingleton() { + if (!barretenbergSyncSingletonPromise) { + barretenbergSyncSingletonPromise = BarretenbergSync.new().then(s => (barretenbergSyncSingleton = s)); + } + return barretenbergSyncSingletonPromise; + } + static getSingleton() { if (!barretenbergSyncSingleton) { - barretenbergSyncSingleton = BarretenbergSync.new(); + throw new Error('First call BarretenbergSync.initSingleton() on @aztec/bb.js module.'); } return barretenbergSyncSingleton; } @@ -66,3 +74,9 @@ export class BarretenbergSync extends BarretenbergApiSync { return this.wasm; } } + +// If we're in ESM environment, use top level await. CJS users need to call it manually. +// Need to ignore for cjs build. +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore +await BarretenbergSync.initSingleton(); // POSTPROCESS ESM ONLY diff --git a/bootstrap.sh b/bootstrap.sh index 5a953eee6b5..ac402f07baa 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Usage: # Bootstraps the repo. End to end tests should be runnable after a bootstrap: # ./bootstrap.sh diff --git a/bootstrap/bootstrap_test.sh b/bootstrap/bootstrap_test.sh index 805f0d0ef0e..8e05b2650ef 100755 --- a/bootstrap/bootstrap_test.sh +++ b/bootstrap/bootstrap_test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script takes the state of your current repository, and clones it inside of a docker container. # You likely don't have a fresh clone, and it's paramount that to test bootstrapping, we don't have any # intermediate build state in the context. 
diff --git a/bootstrap_docker.sh b/bootstrap_docker.sh index 6fc48b2b118..6d086d1d331 100755 --- a/bootstrap_docker.sh +++ b/bootstrap_docker.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script builds the projects listed in build_manifest.sh, terminating when it reaches PROJECT_NAME. # If run from within a project, it will build only that project, unless env var ONLY_TARGET=false. # diff --git a/build-system/.gitrepo b/build-system/.gitrepo index af88cb9a30d..5b99662494d 100644 --- a/build-system/.gitrepo +++ b/build-system/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/build-system branch = master - commit = 76bf210bdfee19c8d6840138edb17a8ff11ef3fb - parent = 9f682cb8cf37eb392c4979f62fdec7126fb4d102 + commit = 1f7b38d90be36c46b46ac52c5d63ce7a0d627ecf + parent = 9c5443651faaed3dcb9fae36727337a34ce5922b method = merge cmdver = 0.4.6 diff --git a/build-system/remote/bootstrap_build_instance.sh b/build-system/remote/bootstrap_build_instance.sh index 535bcdd7ce7..c911eb113b3 100644 --- a/build-system/remote/bootstrap_build_instance.sh +++ b/build-system/remote/bootstrap_build_instance.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null sudo apt update diff --git a/build-system/scripts/add_timestamps b/build-system/scripts/add_timestamps index 8b658925ba5..230d51fb626 100755 --- a/build-system/scripts/add_timestamps +++ b/build-system/scripts/add_timestamps @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash while IFS= read -r line; do printf '%(%Y-%m-%d %H:%M:%S)T %s\n' -1 "$line" done diff --git a/build-system/scripts/augment_prompt b/build-system/scripts/augment_prompt new file mode 100755 index 00000000000..e3dc524043b --- /dev/null +++ b/build-system/scripts/augment_prompt @@ -0,0 +1,2 @@ +# Used to augment the prompt when using start_interactive and zsh. +echo "b " \ No newline at end of file diff --git a/build-system/scripts/build b/build-system/scripts/build index 6906a275149..c82e6bde0a6 100755 --- a/build-system/scripts/build +++ b/build-system/scripts/build @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # # Builds a docker image and pushes it to its repository. Leverages caches where possible. # Cached images include previous successfully built images (including multi-stages) built on this branch. diff --git a/build-system/scripts/build_local b/build-system/scripts/build_local index 7ef1d3c5d8e..0bc99f17d4a 100755 --- a/build-system/scripts/build_local +++ b/build-system/scripts/build_local @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Builds the PROJECTS in the given order. # Will terminate build at TARGET_PROJECT (if given). # Will only build TARGET_PROJECT if ONLY_TARGET given. @@ -114,6 +114,9 @@ for E in "${PROJECTS[@]}"; do # Retag for aztecprotocol dockerhub.
docker tag $DEPLOY_IMAGE_URI aztecprotocol/$REPO:latest + echo -e "${BOLD}Tagged${RESET}: aztecprotocol/$REPO:latest" + echo -e "${BOLD}SHA256${RESET}: $(docker inspect --format='{{.Id}}' $DEPLOY_IMAGE_URI)" + if [ "$PROJECT_DIR_NAME" = "$TARGET_PROJECT" ]; then if [ -n "$LAUNCH" ]; then docker run -ti --rm aztecprotocol/$REPO:latest diff --git a/build-system/scripts/calculate_content_hash b/build-system/scripts/calculate_content_hash index 28ae8ff2e62..7b7b68ade71 100755 --- a/build-system/scripts/calculate_content_hash +++ b/build-system/scripts/calculate_content_hash @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/calculate_image_tag b/build-system/scripts/calculate_image_tag index c273648287c..f2a3cea871f 100755 --- a/build-system/scripts/calculate_image_tag +++ b/build-system/scripts/calculate_image_tag @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Return a repository's build cache image tag based on content hash. # If the second argument is set: # It's used to suffix the tag with the given unique arch descriptor. diff --git a/build-system/scripts/calculate_image_uri b/build-system/scripts/calculate_image_uri index 8efd7ab4cdc..c107647434c 100755 --- a/build-system/scripts/calculate_image_uri +++ b/build-system/scripts/calculate_image_uri @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/calculate_rebuild_files b/build-system/scripts/calculate_rebuild_files index 9be1afd4912..49e07634955 100755 --- a/build-system/scripts/calculate_rebuild_files +++ b/build-system/scripts/calculate_rebuild_files @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/check_rebuild b/build-system/scripts/check_rebuild index 76f99c6540e..df2291065e4 100755 --- a/build-system/scripts/check_rebuild +++ b/build-system/scripts/check_rebuild @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # If this script fails (nonzero exit), then the caller should rebuild.
[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/clean_image_tags b/build-system/scripts/clean_image_tags index 832fdf4cda6..9c4557e68c6 100755 --- a/build-system/scripts/clean_image_tags +++ b/build-system/scripts/clean_image_tags @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -e @@ -20,7 +20,7 @@ fi # Collect all the commits ids in the repository and remove the remote for faster lookups # See warning in https://github.blog/2020-12-21-get-up-to-speed-with-partial-clone-and-shallow-clone/ -git config fetch.recurseSubmodules false +git config fetch.recurseSubmodules false git fetch --filter=tree:0 origin ORIGIN_URL=$(git remote get-url origin) git remote remove origin @@ -29,7 +29,7 @@ git remote remove origin # This happens for all commits tagged for PRs that then get squashed and merged IFS=$'\n' for TAG in $IMAGE_TAGS; do - if [[ $TAG =~ ^cache-[0-9a-fA-F]+-builder$ ]]; then + if [[ $TAG =~ ^cache-[0-9a-fA-F]+-builder$ ]]; then TAG_COMMIT=$(echo "$TAG" | cut -d '-' -f 2) if git cat-file -e $TAG_COMMIT; then echo "Commit for $TAG found" diff --git a/build-system/scripts/cond_run_compose b/build-system/scripts/cond_run_compose index ee412325a6a..d1d14c6baca 100755 --- a/build-system/scripts/cond_run_compose +++ b/build-system/scripts/cond_run_compose @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/cond_run_container b/build-system/scripts/cond_run_container index ca8d67fc0bd..12badea45af 100755 --- a/build-system/scripts/cond_run_container +++ b/build-system/scripts/cond_run_container @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script if the REPOSITORY content hash has changed and we haven't had a successful run. # # Arguments are: diff --git a/build-system/scripts/cond_run_script b/build-system/scripts/cond_run_script index c1ada5f8cf2..1eb6b3695d9 100755 --- a/build-system/scripts/cond_run_script +++ b/build-system/scripts/cond_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script if the REPOSITORY content hash has changed and we haven't had a successful run. 
# # Arguments are: diff --git a/build-system/scripts/cond_spot_run_build b/build-system/scripts/cond_spot_run_build index 3333e5dec7c..a2e2663de89 100755 --- a/build-system/scripts/cond_spot_run_build +++ b/build-system/scripts/cond_spot_run_build @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/cond_spot_run_compose b/build-system/scripts/cond_spot_run_compose index 51a4529471a..56da3e3da4e 100755 --- a/build-system/scripts/cond_spot_run_compose +++ b/build-system/scripts/cond_spot_run_compose @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/cond_spot_run_container b/build-system/scripts/cond_spot_run_container index 48876a65f3b..5b2ebc71f32 100755 --- a/build-system/scripts/cond_spot_run_container +++ b/build-system/scripts/cond_spot_run_container @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/cond_spot_run_script b/build-system/scripts/cond_spot_run_script index 4b1832127ac..23a678fcd32 100755 --- a/build-system/scripts/cond_spot_run_script +++ b/build-system/scripts/cond_spot_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script on a remote spot instance if the REPOSITORY content hash has changed and we haven't had a # successful run. # The TAG_POSTFIX is used by cond_spot_run_test whereby we use an image tag postfixed with JOB_NAME to identify if @@ -19,7 +19,12 @@ CPUS=$2 ARCH=$3 shift 3 -BASE_TAG=$(calculate_image_tag $REPOSITORY) +MULTIARCH=$(query_manifest multiarch $REPOSITORY) +if [ "$MULTIARCH" == "host" ]; then + BASE_TAG=$(calculate_image_tag $REPOSITORY $ARCH) +else + BASE_TAG=$(calculate_image_tag $REPOSITORY) +fi SUCCESS_TAG=$BASE_TAG if [ -n "${TAG_POSTFIX:-}" ]; then diff --git a/build-system/scripts/cond_spot_run_test b/build-system/scripts/cond_spot_run_test index e8b8f71358d..cc48ffc742e 100755 --- a/build-system/scripts/cond_spot_run_test +++ b/build-system/scripts/cond_spot_run_test @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/create_ecr_manifest b/build-system/scripts/create_ecr_manifest index 5bda420039d..fcc96d7651e 100755 --- a/build-system/scripts/create_ecr_manifest +++ b/build-system/scripts/create_ecr_manifest @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script: # 1. Logs into ECR and ensures we have the given repository # 2.
Computes the image uri of the cached images for the given repository given the list of architectures diff --git a/build-system/scripts/deploy b/build-system/scripts/deploy index ca43d5b9cfb..a158175c414 100755 --- a/build-system/scripts/deploy +++ b/build-system/scripts/deploy @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_dockerhub b/build-system/scripts/deploy_dockerhub index 606e0952b5d..b8d925b0711 100755 --- a/build-system/scripts/deploy_dockerhub +++ b/build-system/scripts/deploy_dockerhub @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_ecr b/build-system/scripts/deploy_ecr index 3be534694c7..c156f99e71e 100755 --- a/build-system/scripts/deploy_ecr +++ b/build-system/scripts/deploy_ecr @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_global b/build-system/scripts/deploy_global index 28b70a28619..2267474706f 100755 --- a/build-system/scripts/deploy_global +++ b/build-system/scripts/deploy_global @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Deployment script for global service (e.g. company website and metrics). [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_npm b/build-system/scripts/deploy_npm index a35d68c4650..16df5a156d0 100755 --- a/build-system/scripts/deploy_npm +++ b/build-system/scripts/deploy_npm @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_s3 b/build-system/scripts/deploy_s3 index caaa4d00bd9..87acbf004d9 100755 --- a/build-system/scripts/deploy_s3 +++ b/build-system/scripts/deploy_s3 @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_service b/build-system/scripts/deploy_service index b014ca0d823..83c45128141 100755 --- a/build-system/scripts/deploy_service +++ b/build-system/scripts/deploy_service @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/deploy_terraform b/build-system/scripts/deploy_terraform index ce2231853aa..b81c4d3f4b2 100755 --- a/build-system/scripts/deploy_terraform +++ b/build-system/scripts/deploy_terraform @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/dockerhub_login b/build-system/scripts/dockerhub_login index dae523fb43b..50a2068377b 100755 --- a/build-system/scripts/dockerhub_login +++ b/build-system/scripts/dockerhub_login @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Retries up to 3 times with 10 second intervals for i in $(seq 1 3); do diff --git a/build-system/scripts/ecr_login b/build-system/scripts/ecr_login index 54a8e6d36da..331b49d4024 100755 --- a/build-system/scripts/ecr_login +++ b/build-system/scripts/ecr_login @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu REGION=${1:-$ECR_REGION} # Retries up to 3 times with 10 second intervals diff --git a/build-system/scripts/ensure_repo b/build-system/scripts/ensure_repo index b767976354f..b71c0fabb2f 100755 --- 
a/build-system/scripts/ensure_repo +++ b/build-system/scripts/ensure_repo @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Logs the shell into the ECR instance at the given region, establishes if the given repository exists, creates it if it # doesn't, and re-applies the lifecycle policy (determines when images should be automatically deleted) if it does. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace diff --git a/build-system/scripts/ensure_terraform b/build-system/scripts/ensure_terraform index d7444aa4e4c..ce53dba1b31 100755 --- a/build-system/scripts/ensure_terraform +++ b/build-system/scripts/ensure_terraform @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Downloads and installs `terraform` if it's not installed. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/extract_repo b/build-system/scripts/extract_repo index ecae3c84152..6bdae1027f9 100755 --- a/build-system/scripts/extract_repo +++ b/build-system/scripts/extract_repo @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Given a repository, extracts the build's entire /usr/src dir to the given path. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu @@ -16,7 +16,7 @@ TEMP_CONTAINER=$(docker create $IMAGE_COMMIT_URI) echo "Extracting $EXTRACT_FROM from $REPOSITORY to $EXTRACT_TO..." mkdir -p $EXTRACT_TO docker cp $TEMP_CONTAINER:$EXTRACT_FROM $EXTRACT_TO -docker rm -v $TEMP_CONTAINER > /dev/null +docker rm -v $TEMP_CONTAINER >/dev/null echo "Extracted contents:" ls -al $EXTRACT_TO diff --git a/build-system/scripts/extract_tag_version b/build-system/scripts/extract_tag_version index d142458104d..c0f7c3fd165 100755 --- a/build-system/scripts/extract_tag_version +++ b/build-system/scripts/extract_tag_version @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script takes a repository name as variable, # then checks if the commit tag variable (if any) # is a valid semver & echoes that valid semver. @@ -28,7 +28,7 @@ if [[ "$COMMIT_TAG" == *"/"* ]]; then COMMIT_TAG_VERSION="${COMMIT_TAG#*/}" echo "Tag was made for: $REPO_NAME" >&2 echo "Version: $COMMIT_TAG_VERSION" >&2 - + # Check if REPO_NAME is equal to REPOSITORY if [[ "$REPO_NAME" != "$REPOSITORY" ]]; then echo "REPO_NAME ($REPO_NAME) does not match REPOSITORY ($REPOSITORY). Exiting..." >&2 diff --git a/build-system/scripts/image_exists b/build-system/scripts/image_exists index 0a4bee54d7a..0ad9d90ae69 100755 --- a/build-system/scripts/image_exists +++ b/build-system/scripts/image_exists @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Returns true if the given image exists in the current ECR. aws ecr describe-images --region=$ECR_REGION --repository-name=$1 --image-ids=imageTag=$2 > /dev/null 2>&1 diff --git a/build-system/scripts/init_submodules b/build-system/scripts/init_submodules index a4a9cd62db6..0b89b089866 100755 --- a/build-system/scripts/init_submodules +++ b/build-system/scripts/init_submodules @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # For a given repository, init any required submodules.
[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -euo pipefail diff --git a/build-system/scripts/query_manifest b/build-system/scripts/query_manifest index cd0b5c0888b..a6e84650331 100755 --- a/build-system/scripts/query_manifest +++ b/build-system/scripts/query_manifest @@ -1,10 +1,10 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu CMD=$1 REPO=$2 -ROOT_PATH=${ROOT_PATH:-$PWD} +ROOT_PATH=${ROOT_PATH:-$(git rev-parse --show-toplevel)} MANIFEST=$ROOT_PATH/build_manifest.yml if [ $(yq "has(\"$REPO\")" $MANIFEST) == "false" ]; then diff --git a/build-system/scripts/remote_run_script b/build-system/scripts/remote_run_script index e9ef84d723d..8e1d8adf38c 100755 --- a/build-system/scripts/remote_run_script +++ b/build-system/scripts/remote_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Copies the runner script to the remote instance, runs it giving it script and args to run. # The runner script checks out the repository first and runs setup-env. # diff --git a/build-system/scripts/remote_runner b/build-system/scripts/remote_runner index ce1a567a198..6283050283c 100755 --- a/build-system/scripts/remote_runner +++ b/build-system/scripts/remote_runner @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/request_spot b/build-system/scripts/request_spot index 35c4d650bac..3d669fed1df 100755 --- a/build-system/scripts/request_spot +++ b/build-system/scripts/request_spot @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/build-system/scripts/setup_env b/build-system/scripts/setup_env index 6a166d97236..3486e5f8653 100755 --- a/build-system/scripts/setup_env +++ b/build-system/scripts/setup_env @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script sets up the global build environment. This should be called before any other build scripts, # as the other build scripts assume these global variables are set. The global variables are written to # the file in $BASH_ENV, which means that any new bash shells launched within the lifetime of the machine @@ -16,8 +16,9 @@ BRANCH=${5:-} PULL_REQUEST=${6:-} BASH_ENV=${BASH_ENV:-} +ROOT_PATH=$(git rev-parse --show-toplevel) BUILD_SYSTEM_PATH=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd) -PROJECT=$(cat PROJECT) +PROJECT=$(cat $ROOT_PATH/PROJECT) COMMIT_MESSAGE=$(git log -n 1 --pretty=format:"%s" $COMMIT_HASH) PATH=$PATH:$BUILD_SYSTEM_PATH/scripts @@ -64,7 +65,7 @@ if [ -z "$BASH_ENV" ]; then BASH_ENV=$(mktemp) fi -echo export ROOT_PATH=$PWD >> $BASH_ENV +echo export ROOT_PATH=$ROOT_PATH >> $BASH_ENV echo export BUILD_SYSTEM_PATH=$BUILD_SYSTEM_PATH >> $BASH_ENV echo export DOCKER_BUILDKIT=${DOCKER_BUILDKIT:-1} >> $BASH_ENV echo export BUILDKIT_PROGRESS=plain >> $BASH_ENV diff --git a/build-system/scripts/should_deploy b/build-system/scripts/should_deploy index 53f295b191f..283191f51f3 100755 --- a/build-system/scripts/should_deploy +++ b/build-system/scripts/should_deploy @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Returns success if we are expected to do a deployment. # Right now, that's only if we're master. 
set -eu diff --git a/build-system/scripts/should_release b/build-system/scripts/should_release index 5473fc6da76..701b8a375db 100755 --- a/build-system/scripts/should_release +++ b/build-system/scripts/should_release @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Returns true if we are expected to proceed with a release job. # Specifically if we have a commit tag, are master, or are being forced to release. # This script should be used at the start of all release steps to early out PR runs. diff --git a/build-system/scripts/spot_run_script b/build-system/scripts/spot_run_script index 7a2377973d5..d5e77e35712 100755 --- a/build-system/scripts/spot_run_script +++ b/build-system/scripts/spot_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Runs a test script on a remote spot instance. Arguments are: # 1. NAME: Used to identify spot jobs. # 2. CPUS: Number of cpus on spot request. diff --git a/build-system/scripts/tag_remote_image b/build-system/scripts/tag_remote_image index bc7675b237c..0bb9e2a390f 100755 --- a/build-system/scripts/tag_remote_image +++ b/build-system/scripts/tag_remote_image @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu @@ -34,7 +34,7 @@ if [ "$EXISTING_TAG_MANIFEST" != "$NEW_TAG_MANIFEST" ]; then --image-tag $NEW_TAG \ --image-manifest "$EXISTING_TAG_MANIFEST" 2>&1) TAG_EXIT_CODE=$? - + # If we failed to tag due to too many tags on this image, then clean some of them up and try again if [ $TAG_EXIT_CODE -ne 0 ] && $(echo $TAG_RESULT | grep -q LimitExceededException); then echo "Failed to tag due to limit exceeded. Starting tag cleanup." diff --git a/build-system/scripts/untag_remote_image b/build-system/scripts/untag_remote_image index f79fee729bc..aa913c1200b 100755 --- a/build-system/scripts/untag_remote_image +++ b/build-system/scripts/untag_remote_image @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash REPOSITORY=$1 TAG=$2 aws ecr batch-delete-image --region=$ECR_REGION --repository-name $REPOSITORY --image-ids imageTag=$2 > /dev/null \ No newline at end of file diff --git a/build-system/scripts/upload_logs_to_s3 b/build-system/scripts/upload_logs_to_s3 index ede934edae5..25c7fe2e50a 100755 --- a/build-system/scripts/upload_logs_to_s3 +++ b/build-system/scripts/upload_logs_to_s3 @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Uploads to S3 the contents of the log file mounted on the end-to-end container, # which contains log entries with an associated event and metrics for it. diff --git a/build-system/start_interactive b/build-system/start_interactive new file mode 100755 index 00000000000..64e24499782 --- /dev/null +++ b/build-system/start_interactive @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +# Starts an interactive shell with the build system initialised. +# Good for playing around with build system on development machines. 
+ +source $(dirname "$0")/scripts/setup_env '' '' mainframe_$USER > /dev/null +PROMPT_LEAN_LEFT=augment_prompt $SHELL \ No newline at end of file diff --git a/build_manifest.yml b/build_manifest.yml index 588c3c70a93..43bce1a4071 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -216,4 +216,4 @@ docs: yellow-paper: buildDir: yellow-paper rebuildPatterns: - - ^yellow-paper/ \ No newline at end of file + - ^yellow-paper/ diff --git a/circuits/cpp/bootstrap.sh b/circuits/cpp/bootstrap.sh index 6a64d541b7b..440bf1e44c6 100755 --- a/circuits/cpp/bootstrap.sh +++ b/circuits/cpp/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/circuits/cpp/format.sh b/circuits/cpp/format.sh index e12e84ab27a..54047bd6def 100755 --- a/circuits/cpp/format.sh +++ b/circuits/cpp/format.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu if [ "${1:-}" == "staged" ]; then diff --git a/circuits/cpp/scripts/build_run_tests_docker_local b/circuits/cpp/scripts/build_run_tests_docker_local index 19eb1784f71..f861fecbfa8 100755 --- a/circuits/cpp/scripts/build_run_tests_docker_local +++ b/circuits/cpp/scripts/build_run_tests_docker_local @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/circuits/cpp/scripts/collect_coverage_information.sh b/circuits/cpp/scripts/collect_coverage_information.sh index cac9393a9b6..45ad2764a22 100755 --- a/circuits/cpp/scripts/collect_coverage_information.sh +++ b/circuits/cpp/scripts/collect_coverage_information.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Check that the correct number of args have been provided if [ $# -ne 2 ]; then @@ -67,7 +67,7 @@ if [ ${#non_empty_profiles[@]} -gt 1 ]; then additional_objects+="-object $WORKING_DIRECTORY/bin/${non_empty_profile_base}_tests " done object_string=${additional_objects#"-object"} - + # Output the coverage report into `all_tests_coverage_report` folder rm -rf "$WORKING_DIRECTORY/all_tests_coverage_report" mkdir "$WORKING_DIRECTORY/all_tests_coverage_report" diff --git a/circuits/cpp/scripts/run_tests b/circuits/cpp/scripts/run_tests index dd7d7e21388..62fbd90153b 100755 --- a/circuits/cpp/scripts/run_tests +++ b/circuits/cpp/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/circuits/cpp/scripts/run_tests_local b/circuits/cpp/scripts/run_tests_local index d65ff5f358f..4c12807414e 100755 --- a/circuits/cpp/scripts/run_tests_local +++ b/circuits/cpp/scripts/run_tests_local @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/circuits/cpp/scripts/tidy.sh b/circuits/cpp/scripts/tidy.sh index 3283fb89b62..7faab2658c6 100755 --- a/circuits/cpp/scripts/tidy.sh +++ b/circuits/cpp/scripts/tidy.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Run clang-tidy on all C++ source files diff --git a/circuits/cpp/src/aztec3/circuits/abis/block_header.hpp b/circuits/cpp/src/aztec3/circuits/abis/block_header.hpp index 7d07d16fe0a..c1e37ae856d 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/block_header.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/block_header.hpp @@ -26,7 +26,7 @@ template struct BlockHeader { fr nullifier_tree_root = 0; fr contract_tree_root = 0; fr l1_to_l2_messages_tree_root = 0; - fr blocks_tree_root = 0; + fr archive_root = 0; fr private_kernel_vk_tree_root = 0; // TODO: 
future enhancement // Public data @@ -38,7 +38,7 @@ template struct BlockHeader { nullifier_tree_root, contract_tree_root, l1_to_l2_messages_tree_root, - blocks_tree_root, + archive_root, private_kernel_vk_tree_root, public_data_tree_root, global_variables_hash); @@ -47,8 +47,7 @@ template struct BlockHeader { { return note_hash_tree_root == other.note_hash_tree_root && nullifier_tree_root == other.nullifier_tree_root && contract_tree_root == other.contract_tree_root && - l1_to_l2_messages_tree_root == other.l1_to_l2_messages_tree_root && - blocks_tree_root == other.blocks_tree_root && + l1_to_l2_messages_tree_root == other.l1_to_l2_messages_tree_root && archive_root == other.archive_root && private_kernel_vk_tree_root == other.private_kernel_vk_tree_root && public_data_tree_root == other.public_data_tree_root && global_variables_hash == other.global_variables_hash; @@ -62,7 +61,7 @@ template struct BlockHeader { nullifier_tree_root.assert_is_zero(); contract_tree_root.assert_is_zero(); l1_to_l2_messages_tree_root.assert_is_zero(); - blocks_tree_root.assert_is_zero(); + archive_root.assert_is_zero(); private_kernel_vk_tree_root.assert_is_zero(); public_data_tree_root.assert_is_zero(); global_variables_hash.assert_is_zero(); @@ -77,7 +76,7 @@ template struct BlockHeader { BlockHeader> data = { to_ct(note_hash_tree_root), to_ct(nullifier_tree_root), to_ct(contract_tree_root), - to_ct(l1_to_l2_messages_tree_root), to_ct(blocks_tree_root), to_ct(private_kernel_vk_tree_root), + to_ct(l1_to_l2_messages_tree_root), to_ct(archive_root), to_ct(private_kernel_vk_tree_root), to_ct(public_data_tree_root), to_ct(global_variables_hash), }; @@ -91,7 +90,7 @@ template struct BlockHeader { BlockHeader data = { to_nt(note_hash_tree_root), to_nt(nullifier_tree_root), to_nt(contract_tree_root), - to_nt(l1_to_l2_messages_tree_root), to_nt(blocks_tree_root), to_nt(private_kernel_vk_tree_root), + to_nt(l1_to_l2_messages_tree_root), to_nt(archive_root), to_nt(private_kernel_vk_tree_root), to_nt(public_data_tree_root), to_nt(global_variables_hash), }; @@ -106,7 +105,7 @@ template struct BlockHeader { nullifier_tree_root.set_public(); contract_tree_root.set_public(); l1_to_l2_messages_tree_root.set_public(); - blocks_tree_root.set_public(); + archive_root.set_public(); private_kernel_vk_tree_root.set_public(); public_data_tree_root.set_public(); global_variables_hash.set_public(); @@ -118,8 +117,8 @@ template struct BlockHeader { nullifier_tree_root, contract_tree_root, l1_to_l2_messages_tree_root, - blocks_tree_root, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as - // it is not present in noir, + archive_root, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as + // it is not present in noir, public_data_tree_root, global_variables_hash }; } diff --git a/circuits/cpp/src/aztec3/circuits/abis/packers.hpp b/circuits/cpp/src/aztec3/circuits/abis/packers.hpp index 2ee93840908..cc73e4e9a15 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/packers.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/packers.hpp @@ -59,7 +59,7 @@ struct ConstantsPacker { NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, L1_TO_L2_MSG_SUBTREE_HEIGHT), NVP(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp index 
380b1a2eec4..c50cb0e5572 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp @@ -21,7 +21,7 @@ template struct BaseRollupInputs { AppendOnlyTreeSnapshot start_nullifier_tree_snapshot{}; AppendOnlyTreeSnapshot start_contract_tree_snapshot{}; fr start_public_data_tree_root{}; - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; std::array, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP> low_nullifier_leaf_preimages{}; std::array, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP> @@ -37,8 +37,7 @@ template struct BaseRollupInputs { std::array, MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP> new_public_data_reads_sibling_paths{}; - std::array, KERNELS_PER_BASE_ROLLUP> - blocks_tree_root_membership_witnesses{}; + std::array, KERNELS_PER_BASE_ROLLUP> archive_root_membership_witnesses{}; ConstantRollupData constants{}; @@ -48,7 +47,7 @@ template struct BaseRollupInputs { start_nullifier_tree_snapshot, start_contract_tree_snapshot, start_public_data_tree_root, - start_blocks_tree_snapshot, + start_archive_snapshot, low_nullifier_leaf_preimages, low_nullifier_membership_witness, new_commitments_subtree_sibling_path, @@ -56,7 +55,7 @@ template struct BaseRollupInputs { new_contracts_subtree_sibling_path, new_public_data_update_requests_sibling_paths, new_public_data_reads_sibling_paths, - blocks_tree_root_membership_witnesses, + archive_root_membership_witnesses, constants); boolean operator==(BaseRollupInputs const& other) const diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp index a1cb37b6a66..e77d45a492f 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp @@ -11,7 +11,7 @@ template struct ConstantRollupData { using fr = typename NCT::fr; // The very latest roots as at the very beginning of the entire rollup: - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; // Some members of this struct tbd: fr private_kernel_vk_tree_root = 0; @@ -21,7 +21,7 @@ template struct ConstantRollupData { GlobalVariables global_variables{}; - MSGPACK_FIELDS(start_blocks_tree_snapshot, + MSGPACK_FIELDS(start_archive_snapshot, private_kernel_vk_tree_root, public_kernel_vk_tree_root, base_rollup_vk_hash, diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp index c765c9d09ad..f57aa95da7a 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp @@ -25,16 +25,16 @@ template struct RootRollupInputs { AppendOnlyTreeSnapshot start_l1_to_l2_messages_tree_snapshot{}; // inputs required to add the block hash - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; - std::array new_blocks_tree_sibling_path{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; + std::array new_archive_sibling_path{}; // For serialization, update with new fields MSGPACK_FIELDS(previous_rollup_data, new_l1_to_l2_messages, new_l1_to_l2_messages_tree_root_sibling_path, start_l1_to_l2_messages_tree_snapshot, - start_blocks_tree_snapshot, - new_blocks_tree_sibling_path); + start_archive_snapshot, + new_archive_sibling_path); bool 
operator==(RootRollupInputs const&) const = default; }; diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp index 74aff1ba5e8..4392632b69b 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp @@ -43,8 +43,8 @@ template struct RootRollupPublicInputs { AppendOnlyTreeSnapshot start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot{}; AppendOnlyTreeSnapshot end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot{}; - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; - AppendOnlyTreeSnapshot end_blocks_tree_snapshot{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; + AppendOnlyTreeSnapshot end_archive_snapshot{}; std::array calldata_hash{}; std::array l1_to_l2_messages_hash{}; @@ -68,8 +68,8 @@ template struct RootRollupPublicInputs { end_l1_to_l2_messages_tree_snapshot, start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot, end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot, - start_blocks_tree_snapshot, - end_blocks_tree_snapshot, + start_archive_snapshot, + end_archive_snapshot, calldata_hash, l1_to_l2_messages_hash); @@ -88,7 +88,7 @@ template struct RootRollupPublicInputs { write(buf, start_public_data_tree_root); write(buf, start_l1_to_l2_messages_tree_snapshot); write(buf, start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot); - write(buf, start_blocks_tree_snapshot); + write(buf, start_archive_snapshot); write(buf, end_note_hash_tree_snapshot); write(buf, end_nullifier_tree_snapshot); write(buf, end_contract_tree_snapshot); @@ -97,7 +97,7 @@ template struct RootRollupPublicInputs { write(buf, end_public_data_tree_root); write(buf, end_l1_to_l2_messages_tree_snapshot); write(buf, end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot); - write(buf, end_blocks_tree_snapshot); + write(buf, end_archive_snapshot); // Stitching calldata hash together auto high_buffer = calldata_hash[0].to_buffer(); diff --git a/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp b/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp index 0e9c5394aa8..66614035e65 100644 --- a/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp +++ b/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp @@ -333,7 +333,7 @@ PublicKernelInputs get_kernel_inputs_with_previous_kernel(NT::boolean privat .nullifier_tree_root = ++seed, .contract_tree_root = ++seed, .l1_to_l2_messages_tree_root = ++seed, - .blocks_tree_root = ++seed, + .archive_root = ++seed, .private_kernel_vk_tree_root = ++seed, .public_data_tree_root = ++seed, .global_variables_hash = ++seed, diff --git a/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp b/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp index 652e6b0ca52..1a5952cc4c4 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp @@ -628,7 +628,7 @@ TEST_F(base_rollup_tests, native_calldata_hash) // run_cbind(inputs, outputs); } -TEST_F(base_rollup_tests, native_compute_membership_blocks_tree_negative) +TEST_F(base_rollup_tests, native_compute_membership_archive_negative) { // WRITE a negative test that will fail the inclusion proof @@ -639,15 +639,15 @@ TEST_F(base_rollup_tests, native_compute_membership_blocks_tree_negative) BaseRollupInputs inputs = base_rollup_inputs_from_kernels(kernel_data); MemoryStore 
blocks_store; - auto blocks_tree = MerkleTree(blocks_store, BLOCKS_TREE_HEIGHT); + auto archive = MerkleTree(blocks_store, ARCHIVE_HEIGHT); // Create an INCORRECT sibling path for the note hash tree root in the historical tree roots. - auto hash_path = blocks_tree.get_sibling_path(0); - std::array sibling_path{}; - for (size_t i = 0; i < BLOCKS_TREE_HEIGHT; ++i) { + auto hash_path = archive.get_sibling_path(0); + std::array sibling_path{}; + for (size_t i = 0; i < ARCHIVE_HEIGHT; ++i) { sibling_path[i] = hash_path[i] + 1; } - inputs.blocks_tree_root_membership_witnesses[0] = { + inputs.archive_root_membership_witnesses[0] = { .leaf_index = 0, .sibling_path = sibling_path, }; diff --git a/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp b/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp index 1b2fa67ea7d..d6a7cb4af0e 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp @@ -132,11 +132,11 @@ NT::fr calculate_commitments_subtree(DummyBuilder& builder, BaseRollupInputs con * @param constantBaseRollupData * @param baseRollupInputs */ -void perform_blocks_tree_membership_checks(DummyBuilder& builder, BaseRollupInputs const& baseRollupInputs) +void perform_archive_membership_checks(DummyBuilder& builder, BaseRollupInputs const& baseRollupInputs) { // For each of the historical_note_hash_tree_membership_checks, we need to do an inclusion proof // against the historical root provided in the rollup constants - auto historical_root = baseRollupInputs.constants.start_blocks_tree_snapshot.root; + auto historical_root = baseRollupInputs.constants.start_archive_snapshot.root; for (size_t i = 0; i < 2; i++) { // Rebuild the block hash @@ -155,8 +155,8 @@ void perform_blocks_tree_membership_checks(DummyBuilder& builder, BaseRollupInpu l1_to_l2_messages_tree_root, public_data_tree_root); - abis::MembershipWitness const historical_root_witness = - baseRollupInputs.blocks_tree_root_membership_witnesses[i]; + abis::MembershipWitness const historical_root_witness = + baseRollupInputs.archive_root_membership_witnesses[i]; check_membership(builder, previous_block_hash, @@ -524,7 +524,7 @@ BaseOrMergeRollupPublicInputs base_rollup_circuit(DummyBuilder& builder, BaseRol components::compute_kernels_calldata_hash(baseRollupInputs.kernel_data); // Perform membership checks that the notes provided exist within the historical trees data - perform_blocks_tree_membership_checks(builder, baseRollupInputs); + perform_archive_membership_checks(builder, baseRollupInputs); AggregationObject const aggregation_object = aggregate_proofs(baseRollupInputs); diff --git a/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp b/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp index ed6f6ca0c25..50f85996946 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp @@ -176,8 +176,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) // Create initial nullifier tree with 32 initial nullifiers auto nullifier_tree = get_initial_nullifier_tree_empty(); - MemoryStore blocks_tree_store; - MerkleTree blocks_tree(blocks_tree_store, BLOCKS_TREE_HEIGHT); + MemoryStore archive_store; + MerkleTree archive(archive_store, ARCHIVE_HEIGHT); std::array kernels = { get_empty_kernel(), get_empty_kernel(), get_empty_kernel(), get_empty_kernel() @@ -192,9 +192,8 @@ TEST_F(root_rollup_tests, 
native_root_missing_nullifier_logic) contract_tree.root(), l1_to_l2_messages_tree.root(), public_data_tree.root()); - blocks_tree.update_element(0, start_block_hash); - AppendOnlyTreeSnapshot start_blocks_tree_snapshot = { .root = blocks_tree.root(), - .next_available_leaf_index = 1 }; + archive.update_element(0, start_block_hash); + AppendOnlyTreeSnapshot start_archive_snapshot = { .root = archive.root(), .next_available_leaf_index = 1 }; // Create commitments for (size_t kernel_j = 0; kernel_j < 4; kernel_j++) { @@ -243,9 +242,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) contract_tree.root(), l1_to_l2_messages_tree.root(), public_data_tree.root()); - blocks_tree.update_element(1, end_block_hash); - AppendOnlyTreeSnapshot end_blocks_tree_snapshot = { .root = blocks_tree.root(), - .next_available_leaf_index = 2 }; + archive.update_element(1, end_block_hash); + AppendOnlyTreeSnapshot end_archive_snapshot = { .root = archive.root(), .next_available_leaf_index = 2 }; // Compute the end snapshot AppendOnlyTreeSnapshot const end_l1_to_l2_messages_tree_snapshot = { .root = l1_to_l2_messages_tree.root(), @@ -292,8 +290,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) rootRollupInputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.end_contract_tree_snapshot); ASSERT_EQ(outputs.end_l1_to_l2_messages_tree_snapshot, end_l1_to_l2_messages_tree_snapshot); - ASSERT_EQ(outputs.start_blocks_tree_snapshot, start_blocks_tree_snapshot); - ASSERT_EQ(outputs.end_blocks_tree_snapshot, end_blocks_tree_snapshot); + ASSERT_EQ(outputs.start_archive_snapshot, start_archive_snapshot); + ASSERT_EQ(outputs.end_archive_snapshot, end_archive_snapshot); // Compute the expected calldata hash for the root rollup (including the l2 -> l1 messages) auto left = components::compute_kernels_calldata_hash({ kernels[0], kernels[1] }); diff --git a/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp b/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp index 3c801da601f..b4f876263d9 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp @@ -116,10 +116,10 @@ RootRollupPublicInputs root_rollup_circuit(DummyBuilder& builder, RootRollupInpu right.end_public_data_tree_root); // Update the blocks tree - auto end_blocks_tree_snapshot = components::insert_subtree_to_snapshot_tree( + auto end_archive_snapshot = components::insert_subtree_to_snapshot_tree( builder, - rootRollupInputs.start_blocks_tree_snapshot, - rootRollupInputs.new_blocks_tree_sibling_path, + rootRollupInputs.start_archive_snapshot, + rootRollupInputs.new_archive_sibling_path, fr::zero(), block_hash, 0, @@ -140,8 +140,8 @@ RootRollupPublicInputs root_rollup_circuit(DummyBuilder& builder, RootRollupInpu .end_public_data_tree_root = right.end_public_data_tree_root, .start_l1_to_l2_messages_tree_snapshot = rootRollupInputs.start_l1_to_l2_messages_tree_snapshot, .end_l1_to_l2_messages_tree_snapshot = new_l1_to_l2_messages_tree_snapshot, - .start_blocks_tree_snapshot = rootRollupInputs.start_blocks_tree_snapshot, - .end_blocks_tree_snapshot = end_blocks_tree_snapshot, + .start_archive_snapshot = rootRollupInputs.start_archive_snapshot, + .end_archive_snapshot = end_archive_snapshot, .calldata_hash = components::compute_calldata_hash(rootRollupInputs.previous_rollup_data), .l1_to_l2_messages_hash = compute_messages_hash(rootRollupInputs.new_l1_to_l2_messages) }; 
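Reviewer note: the blockstree-to-archive rename above is mechanical, but the state it renames is worth spelling out: the archive is an append-only Merkle tree of block hashes whose state is summarised as `{ root, next_available_leaf_index }` before and after each block insertion. A toy TypeScript sketch of that snapshot bookkeeping, mirroring the start/end snapshots the tests assert on — sha256 and a depth of 4 stand in for the circuit's Pedersen hash and `ARCHIVE_HEIGHT`, and all names are illustrative, not the real API:

```ts
import { createHash } from 'node:crypto';

const DEPTH = 4; // stand-in for ARCHIVE_HEIGHT
const ZERO = Buffer.alloc(32);
const hash = (l: Buffer, r: Buffer) =>
  createHash('sha256').update(Buffer.concat([l, r])).digest();

class AppendOnlyTree {
  private leaves: Buffer[] = [];

  appendLeaf(leaf: Buffer) {
    this.leaves.push(leaf); // leaves are only ever appended, never updated
  }

  root(): Buffer {
    // Pad to a full bottom layer of zero leaves, then fold pairwise upwards.
    let layer = [...this.leaves];
    while (layer.length < 1 << DEPTH) layer.push(ZERO);
    for (let d = 0; d < DEPTH; d++) {
      const next: Buffer[] = [];
      for (let i = 0; i < layer.length; i += 2) next.push(hash(layer[i], layer[i + 1]));
      layer = next;
    }
    return layer[0];
  }

  snapshot() {
    // The shape the rollup circuits carry around as an AppendOnlyTreeSnapshot.
    return { root: this.root().toString('hex'), nextAvailableLeafIndex: this.leaves.length };
  }
}

// Mirrors the test flow: snapshot, append the new block hash, snapshot again.
const archive = new AppendOnlyTree();
archive.appendLeaf(hash(ZERO, ZERO));             // stand-in genesis block hash
const startArchiveSnapshot = archive.snapshot();  // next_available_leaf_index == 1
archive.appendLeaf(hash(ZERO, hash(ZERO, ZERO))); // stand-in new block hash
const endArchiveSnapshot = archive.snapshot();    // next_available_leaf_index == 2
console.log(startArchiveSnapshot, endArchiveSnapshot);
```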
diff --git a/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp b/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp index 72fee73a733..8526c02dd68 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp @@ -86,8 +86,8 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne { // @todo Look at the starting points for all of these. // By supporting as inputs we can make very generic tests, where it is trivial to try new setups. - MemoryStore blocks_tree_store; - MerkleTree blocks_tree = MerkleTree(blocks_tree_store, BLOCKS_TREE_HEIGHT); + MemoryStore archive_store; + MerkleTree archive = MerkleTree(archive_store, ARCHIVE_HEIGHT); BaseRollupInputs baseRollupInputs = { .kernel_data = kernel_data, @@ -161,10 +161,10 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne contract_tree.root(), l1_to_l2_msg_tree.root(), public_data_tree.root()); - blocks_tree.update_element(0, block_hash); + archive.update_element(0, block_hash); - ConstantRollupData const constantRollupData = { .start_blocks_tree_snapshot = { - .root = blocks_tree.root(), + ConstantRollupData const constantRollupData = { .start_archive_snapshot = { + .root = archive.root(), .next_available_leaf_index = 1, } }; baseRollupInputs.constants = constantRollupData; @@ -176,7 +176,7 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne kernel_data[i].public_inputs.constants.block_header.nullifier_tree_root = nullifier_tree.root(); kernel_data[i].public_inputs.constants.block_header.contract_tree_root = contract_tree.root(); kernel_data[i].public_inputs.constants.block_header.l1_to_l2_messages_tree_root = l1_to_l2_msg_tree.root(); - kernel_data[i].public_inputs.constants.block_header.blocks_tree_root = blocks_tree.root(); + kernel_data[i].public_inputs.constants.block_header.archive_root = archive.root(); kernel_data[i].public_inputs.constants.block_header.public_data_tree_root = public_data_tree.root(); kernel_data[i].public_inputs.constants.block_header.global_variables_hash = prev_global_variables_hash; } @@ -208,12 +208,11 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne } // Get historical_root sibling paths - baseRollupInputs.blocks_tree_root_membership_witnesses[0] = { + baseRollupInputs.archive_root_membership_witnesses[0] = { .leaf_index = 0, - .sibling_path = get_sibling_path(blocks_tree, 0, 0), + .sibling_path = get_sibling_path(archive, 0, 0), }; - baseRollupInputs.blocks_tree_root_membership_witnesses[1] = - baseRollupInputs.blocks_tree_root_membership_witnesses[0]; + baseRollupInputs.archive_root_membership_witnesses[1] = baseRollupInputs.archive_root_membership_witnesses[0]; baseRollupInputs.kernel_data = kernel_data; @@ -378,8 +377,8 @@ RootRollupInputs get_root_rollup_inputs(utils::DummyBuilder& builder, MemoryStore public_data_tree_store; MerkleTree public_data_tree(public_data_tree_store, PUBLIC_DATA_TREE_HEIGHT); - MemoryStore blocks_tree_store; - MerkleTree blocks_tree(blocks_tree_store, BLOCKS_TREE_HEIGHT); + MemoryStore archive_store; + MerkleTree archive(archive_store, ARCHIVE_HEIGHT); // Start blocks tree auto block_hash = compute_block_hash_with_globals(globals, @@ -388,16 +387,16 @@ RootRollupInputs get_root_rollup_inputs(utils::DummyBuilder& builder, contract_tree.root(), l1_to_l2_msg_tree.root(), public_data_tree.root()); - blocks_tree.update_element(0, block_hash); + archive.update_element(0, block_hash); // Blocks tree snapshots - 
AppendOnlyTreeSnapshot const start_blocks_tree_snapshot = { - .root = blocks_tree.root(), + AppendOnlyTreeSnapshot const start_archive_snapshot = { + .root = archive.root(), .next_available_leaf_index = 1, }; // Blocks tree - auto blocks_tree_sibling_path = get_sibling_path(blocks_tree, 1, 0); + auto archive_sibling_path = get_sibling_path(archive, 1, 0); // l1 to l2 tree auto l1_to_l2_tree_sibling_path = @@ -414,8 +413,8 @@ RootRollupInputs get_root_rollup_inputs(utils::DummyBuilder& builder, .new_l1_to_l2_messages = l1_to_l2_messages, .new_l1_to_l2_messages_tree_root_sibling_path = l1_to_l2_tree_sibling_path, .start_l1_to_l2_messages_tree_snapshot = start_l1_to_l2_msg_tree_snapshot, - .start_blocks_tree_snapshot = start_blocks_tree_snapshot, - .new_blocks_tree_sibling_path = blocks_tree_sibling_path, + .start_archive_snapshot = start_archive_snapshot, + .new_archive_sibling_path = archive_sibling_path, }; return rootRollupInputs; } diff --git a/circuits/cpp/src/aztec3/constants.hpp b/circuits/cpp/src/aztec3/constants.hpp index 9ecb0b384d8..358164687e5 100644 --- a/circuits/cpp/src/aztec3/constants.hpp +++ b/circuits/cpp/src/aztec3/constants.hpp @@ -104,7 +104,7 @@ constexpr size_t NOTE_HASH_TREE_HEIGHT = 32; constexpr size_t PUBLIC_DATA_TREE_HEIGHT = 254; constexpr size_t NULLIFIER_TREE_HEIGHT = 20; constexpr size_t L1_TO_L2_MSG_TREE_HEIGHT = 16; -constexpr size_t BLOCKS_TREE_HEIGHT = 16; +constexpr size_t ARCHIVE_HEIGHT = 16; constexpr size_t ROLLUP_VK_TREE_HEIGHT = 8; // TODO: update diff --git a/cspell.json b/cspell.json index 874cc050f0c..22bb21ec9f1 100644 --- a/cspell.json +++ b/cspell.json @@ -42,6 +42,8 @@ "cimg", "clonedeep", "clonedeepwith", + "cmd", + "cmds", "codegen", "comlink", "composability", diff --git a/docs/deploy_netlify.sh b/docs/deploy_netlify.sh index 82b447119f6..24c7c1f3845 100755 --- a/docs/deploy_netlify.sh +++ b/docs/deploy_netlify.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/docs/docs/about_aztec/overview.mdx b/docs/docs/about_aztec/overview.mdx index 3fceee95c47..9bd2afacf19 100644 --- a/docs/docs/about_aztec/overview.mdx +++ b/docs/docs/about_aztec/overview.mdx @@ -33,7 +33,7 @@ Watch Zac, CEO of Aztec, describe our approach to building a privacy preserving ### Private-public Composability -You can watch Mike, Aztec PM, talk about public-private composablity in Aztec at Devcon here. +You can watch Mike, Aztec PM, talk about public-private composability in Aztec at Devcon here. vk relationships. diff --git a/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md b/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md index 751b209dc9b..50ea9ba1146 100644 --- a/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md +++ b/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md @@ -14,7 +14,7 @@ This page will answer: - How indexed merkle trees work - How they can be used for membership exclusion proofs - How they can leverage batch insertions -- Tradoffs of using indexed merkle trees +- Tradeoffs of using indexed merkle trees The content was also covered in a presentation for the [Privacy + Scaling Explorations team at the Ethereum Foundation](https://pse.dev/). @@ -34,7 +34,7 @@ A sparse merkle tree (not every leaf stores a value): -In order to spend / modify a note in the private state tree, one must create a nullifier for it, and prove that the nullifier does not already exist in the nullifier tree. 
As nullifier trees are modelled as sparse merkle trees, non membership checks are (conceptually) trivial. +In order to spend / modify a note in the private state tree, one must create a nullifier for it, and prove that the nullifier does not already exist in the nullifier tree. As nullifier trees are modeled as sparse merkle trees, non membership checks are (conceptually) trivial. Data is stored at the leaf index corresponding to its value. E.g. if I have a sparse tree that can contain $2^{256}$ values and want to prove non membership of the value $2^{128}$. I can prove via a merkle membership proof that $tree\_values[2^{128}] = 0$, conversely if I can prove that $tree\_values[2^{128}] == 1$ I can prove that the item exists. @@ -155,8 +155,8 @@ Suppose we want to show that the value `20` doesn't exist in the tree. We just r - Special case, the low leaf is at the very end, so the new_value must be higher than all values in the tree: - $assert(low\_nullifier_{\textsf{value}} < new\_value_{\textsf{value}})$ - Else: - - $assert(low\_nullifier_{\textsf{value}} < low\_nullifier_{\textsf{value}})$ - - $assert(low\_nullifier_{\textsf{next\_value}} > low\_nullifier_{\textsf{value}})$ + - $assert(low\_nullifier_{\textsf{value}} < new\_value_{\textsf{value}})$ + - $assert(low\_nullifier_{\textsf{next\_value}} > new\_value_{\textsf{value}})$ This is already a massive performance improvement, however we can go further, as this tree is not sparse. We can perform batch insertions. @@ -282,7 +282,7 @@ From looking at the code above we can probably deduce why we need pending insert To perform batched insertions, our circuit must keep track of all values that are pending insertion. -- If the `low_nullifier_membership_witness` is identified to be nonsense ( all zeros, or has a leaf index of -1 ) we will know that this is an pending low nullifier read request and we will have to look within our pending subtree for the nearest low nullifier. +- If the `low_nullifier_membership_witness` is identified to be nonsense ( all zeros, or has a leaf index of -1 ) we will know that this is a pending low nullifier read request and we will have to look within our pending subtree for the nearest low nullifier. - Loop back through all "pending_insertions" - If the pending insertion value is lower than the nullifier we are trying to insert - If the pending insertion value is NOT found, then our circuit is invalid and should self abort. (A minimal sketch of the corrected low-leaf assertions appears below.) diff --git a/docs/docs/concepts/advanced/public_vm.md b/docs/docs/concepts/advanced/public_vm.md index a19a5625dfb..df93a070c6e 100644 --- a/docs/docs/concepts/advanced/public_vm.md +++ b/docs/docs/concepts/advanced/public_vm.md @@ -69,7 +69,7 @@ It verifies a _verifier circuit_ that verifies a public function proof! Why? Modularity, ease of development, backwards compatibility support. -Proceed with following development phases: +Proceed with the following development phases: #### Phase 0: Full Proverless diff --git a/docs/docs/concepts/advanced/sequencer_selection.md b/docs/docs/concepts/advanced/sequencer_selection.md index 4bf211eb4c1..72937391468 100644 --- a/docs/docs/concepts/advanced/sequencer_selection.md +++ b/docs/docs/concepts/advanced/sequencer_selection.md @@ -40,7 +40,7 @@ During the initial proposal phase, proposers submit to L1 a **block commitment** - Identifier of the previous block in the chain. - The output of the VRF for this sequencer.
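As flagged in the indexed-Merkle-tree hunk above, here is a minimal sketch of the corrected low-leaf (low nullifier) assertions, as self-contained C++ with toy types. The Merkle membership proof of the low leaf itself is omitted, and treating `next_value == 0` as "end of the linked list" is an assumption carried over from the surrounding doc.

```cpp
#include <cassert>
#include <cstdint>

// Toy leaf for an indexed Merkle tree: each leaf points at the next-highest
// value in the tree; next_value == 0 marks the highest leaf.
struct IndexedLeaf {
    uint64_t value;
    uint64_t next_value;
};

// Range check against the "low leaf": shows new_value is not yet in the tree
// and may be inserted between low.value and low.next_value.
void check_low_leaf(const IndexedLeaf& low, uint64_t new_value)
{
    if (low.next_value == 0) {
        // Special case: the low leaf is the end of the list, so new_value
        // only needs to exceed it.
        assert(low.value < new_value);
    } else {
        assert(low.value < new_value);
        assert(low.next_value > new_value);
    }
}
```

Note how the buggy assertions being removed in the hunk compared `low_nullifier` against itself; the fixed ones compare against `new_value`, which is what makes this a genuine non-membership (range) proof.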
-At the end of the proposal phase, the sequencer with the highest score submitted becomes the leader for this cycle, and has exclusive rights to deciding the contents of the block. Note that this plays nicely with private mempools, since having exclusive rights allows the leader to disclose private transaction data in the reveal phase. +At the end of the proposal phase, the sequencer with the highest score submitted becomes the leader for this cycle, and has exclusive rights to decide the contents of the block. Note that this plays nicely with private mempools, since having exclusive rights allows the leader to disclose private transaction data in the reveal phase. > _In the original version of Fernet, multiple competing proposals could enter the proving phase. Read more about the rationale for this change [here](https://hackmd.io/0cI_xVsaSVi7PToCJ9A2Ew?both#Mitigation-Elect-single-leader-after-proposal-phase)._ @@ -79,15 +79,15 @@ The only way to trigger an L2 reorg (without an L1 one) is if block N is reveale ![](https://hackmd.io/_uploads/HkMDHxxC2.png) -To mitigate the effect of wasted effort by all sequencers from block N+1 until the reorg, we could implement uncle rewards for these sequencers. And if we are comfortable with slashing, take those rewards out of the pocket of the sequencer that failed to finalise their block. +To mitigate the effect of wasted effort by all sequencers from block N+1 until the reorg, we could implement uncle rewards for these sequencers. And if we are comfortable with slashing, take those rewards out of the pocket of the sequencer that failed to finalize their block. ## Batching > _Read more approaches to batching [here](https://hackmd.io/0cI_xVsaSVi7PToCJ9A2Ew?both#Batching)._ -As an extension to the protocol, we can bake in batching of multiple blocks. Rather than creating one proof per block, we can aggregate multiple blocks into a single proof, in order to amortise the cost of verifying the root rollup ZKP on L1, thus reducing fees. +As an extension to the protocol, we can bake in batching of multiple blocks. Rather than creating one proof per block, we can aggregate multiple blocks into a single proof, in order to amortize the cost of verifying the root rollup ZKP on L1, thus reducing fees. -The tradeoff in batching is delayed finalisation: if we are not posting proofs to L1 for every block, then the network needs to wait until the batch proof is submitted for finalisation. This can also lead to deeper L2 reorgs. +The tradeoff in batching is delayed finalization: if we are not posting proofs to L1 for every block, then the network needs to wait until the batch proof is submitted for finalization. This can also lead to deeper L2 reorgs. In a batching model, proving for each block happens immediately as the block is revealed, same as usual. But the resulting proof is not submitted to L1: instead, it is aggregated into the proof of the next block. diff --git a/docs/docs/concepts/foundation/accounts/keys.md b/docs/docs/concepts/foundation/accounts/keys.md index 589667cbab3..cd8744d0e62 100644 --- a/docs/docs/concepts/foundation/accounts/keys.md +++ b/docs/docs/concepts/foundation/accounts/keys.md @@ -65,7 +65,7 @@ A side effect of enshrining and encoding privacy keys into the account address i ### Encryption keys -The privacy master key is used to derive encryption keys. 
Encryption keys, as their name imply, are used for encrypting private notes for a recipient, where the public key is used for encryption and the corresponding private key used for decryption. +The privacy master key is used to derive encryption keys. Encryption keys, as their name implies, are used for encrypting private notes for a recipient, where the public key is used for encryption and the corresponding private key used for decryption. In a future version, encryption keys will be differentiated between incoming and outgoing. When sending a note to another user, the sender will use the recipient's incoming encryption key for encrypting the data for them, and will optionally use their own outgoing encryption key for encrypting any data about the destination of that note. This is useful for reconstructing transaction history from on-chain data. For example, during a token transfer, the token contract may dictate that the sender encrypts the note with value with the recipient's incoming key, but also records the transfer with its own outgoing key for bookkeeping purposes. @@ -106,4 +106,4 @@ Nevertheless, the attacker cannot steal the affected user's funds, since authent :::info Note that, in the current architecture, the user's wallet needs direct access to the privacy private key, since the wallet needs to use this key for attempting decryption of all notes potentially sent to the user. This means that the privacy private key cannot be stored in a hardware wallet or hardware security module, since the wallet software uses the private key material directly. This may change in future versions in order to enhance security. -::: \ No newline at end of file +::: diff --git a/docs/docs/concepts/foundation/communication/cross_chain_calls.md b/docs/docs/concepts/foundation/communication/cross_chain_calls.md index 44e952a68c1..bff7ed5b936 100644 --- a/docs/docs/concepts/foundation/communication/cross_chain_calls.md +++ b/docs/docs/concepts/foundation/communication/cross_chain_calls.md @@ -73,7 +73,7 @@ In a logical sense, a Message Box functions as a one-way message passing mechani - At some point, a rollup will be executed, in this step messages are "moved" from pending on Domain A, to ready on Domain B. Note that consuming the message is "pulling & deleting" (or nullifying). The action is atomic, so a message that is consumed from the pending set MUST be added to the ready set, or the state transition should fail. A further constraint on moving messages along the way, is that only messages where the `sender` and `recipient` pair exists in a leaf in the contracts tree are allowed! - When the message have been added to the ready set, the `recipient` can consume the message as part of a function call. -Something that might seem weird when comparing to other cross-chain setups, is that we are "pulling" messages, and that the message don't need to be calldata for a function call. For _Arbitrum_ and the like, execution is happening FROM the "message bridge", which then calls the L1 contract. For us, you call the L1 contract, and it should then consume messages from the message box. +Something that might seem weird when compared to other cross-chain setups, is that we are "pulling" messages, and that the message don't need to be calldata for a function call. For _Arbitrum_ and the like, execution is happening FROM the "message bridge", which then calls the L1 contract. For us, you call the L1 contract, and it should then consume messages from the message box. Why? _Privacy_! 
When pushing, we would need the full `calldata`, which for functions with private inputs is not really something we want, as calldata for L1 -> L2 transactions is committed to on L1, e.g., publicly sharing the inputs to a private function. By instead pulling, we can have the "message" be something that is derived from the arguments instead. This way, a private function performing the second half of a deposit could leak the "value" deposited and "who" made the deposit (as this is done on L1), but the new owner can be hidden on L2. @@ -107,7 +107,7 @@ For the sake of cross-chain messages, this means inserting and nullifying L1 $\r ### Messages -While a message could theoretically be arbitrary long, we want to limit the cost of the insertion on L1 as much as possible. Therefore, we allow the users to send 32 bytes of "content" between L1 and L2. If 32 suffices, no packing required. If the 32 is too "small" for the message directly, the sender should simply pass along a `sha256(content)` instead of the content directly (note that this hash should fit in a field element which is ~254 bits. More info on this below). The content can then either be emitted as an event on L2 or kept by the sender, who should then be the only entity that can "unpack" the message. +While a message could theoretically be arbitrarily long, we want to limit the cost of the insertion on L1 as much as possible. Therefore, we allow the users to send 32 bytes of "content" between L1 and L2. If 32 suffices, no packing required. If the 32 is too "small" for the message directly, the sender should simply pass along a `sha256(content)` instead of the content directly (note that this hash should fit in a field element which is ~254 bits. More info on this below). The content can then either be emitted as an event on L2 or kept by the sender, who should then be the only entity that can "unpack" the message. In this manner, there is some way to "unpack" the content on the receiving domain. The message that is passed along requires the `sender/recipient` pair to be communicated as well (we need to know who should receive the message and be able to check). By having the pending messages be a contract on L1, we can ensure that the `sender = msg.sender` and let only `content` and `recipient` be provided by the caller. Summing up, we can use the structs seen below, and only store the commitment (`sha256(LxToLyMsg)`) on chain or in the trees, this way, we need only update a single storage slot per message. @@ -159,7 +159,7 @@ The following diagram shows the overall architecture, combining the earlier sect As mentioned earlier, there will be a link between L1 and L2 contracts (with the L1 part of the link being the portal contract), this link is created at "birth" when the contract leaf is inserted. However, the specific requirements of the link are not yet fully decided, and we will outline a few options below. -The reasoning behind having a link, comes from the difficulty of L2 access control (see "A note on L2 access control").
Having a link that only allows 1 contract (specified at deployment) to send messages to the L2 contract makes this issue "go away" from the application developer's point of view, as the message could only come from the specified contract. The complexity is moved to the protocol layer, which must now ensure that messages to the L2 contract are only sent from the specified L1 contract. :::info The design space for linking L1 and L2 contracts is still open, and we are looking into making access control more efficient to use in the models. @@ -179,7 +179,7 @@ From the L2 contract receiving messages, this model is very similar to the 1:1, When the L1 contract can itself handle where messages are coming from (it could before as well but useless as only 1 address could send), we don't need to worry about it being in only a single pair. The circuits can therefore simply insert the contract leafs without requiring it to ensure that neither have been used before. -With many L2's reading from the same L1, we can also more easily setup generic bridges (with many assets) living in a single L1 contract but minting multiple L2 assets, as the L1 contract can handle the access control and the L2's simply point to it as the portal. This reduces complexity of the L2 contracts as all access control is handled by the L1 contract. +With many L2's reading from the same L1, we can also more easily set up generic bridges (with many assets) living in a single L1 contract but minting multiple L2 assets, as the L1 contract can handle the access control and the L2's simply point to it as the portal. This reduces the complexity of the L2 contracts as all access control is handled by the L1 contract. ## Open Questions diff --git a/docs/docs/concepts/foundation/communication/main.md b/docs/docs/concepts/foundation/communication/main.md index 1f85585dacc..99588c9bc7c 100644 --- a/docs/docs/concepts/foundation/communication/main.md +++ b/docs/docs/concepts/foundation/communication/main.md @@ -2,8 +2,8 @@ title: Contract Communication --- -This section will walk over communication types that behaves differently than normal function calls from. +This section will walk over communication types that behave differently than normal function calls. Namely, if functions are in different domains, private vs. public, their execution behaves a little differently to what you might expect! See [Private <--> Public execution](./public_private_calls/main.md). -Likewise, executing a function on a different domain than its origin needs a bit extra thought. See [L1 <--> L2 communication](./cross_chain_calls.md). \ No newline at end of file +Likewise, executing a function on a different domain than its origin needs a bit extra thought. See [L1 <--> L2 communication](./cross_chain_calls.md). diff --git a/docs/docs/concepts/foundation/transactions.md b/docs/docs/concepts/foundation/transactions.md index 7e44497ae07..9575ab4d6db 100644 --- a/docs/docs/concepts/foundation/transactions.md +++ b/docs/docs/concepts/foundation/transactions.md @@ -15,13 +15,13 @@ On this page you'll learn: See [this diagram](https://raw.githubusercontent.com/AztecProtocol/aztec-packages/2fa143e4d88b3089ebbe2a9e53645edf66157dc8/docs/static/img/sandbox_sending_a_tx.svg) for an in-depth overview of the transaction execution process. It highlights 3 different types of transaction execution: contract deployments, private transactions and public transactions. -See the page on [contract communication](./communication/main.md) for more context on transactions execution.
+See the page on [contract communication](./communication/main.md) for more context on transaction execution. ## Enabling Transaction Semantics: The Aztec Kernel There are two kernel circuits in Aztec, the private kernel and the public kernel. Each circuit validates the correct execution of a particular function call. -A transaction is built up by generating proofs for multiple recursive iterations of kernel circuits. Each call in the call stack is modelled as new iteration of the kernel circuit and are managed by a [FIFO]() queue containing pending function calls. There are two call stacks, one for private calls and one for public calls. +A transaction is built up by generating proofs for multiple recursive iterations of kernel circuits. Each call in the call stack is modeled as a new iteration of the kernel circuit and is managed by a [FIFO]() queue containing pending function calls. There are two call stacks, one for private calls and one for public calls. One iteration of a kernel circuit will pop a call off of the stack and execute the call. If the call triggers subsequent contract calls, these are pushed onto the stack. diff --git a/docs/docs/dev_docs/cli/cli-commands.md b/docs/docs/dev_docs/cli/cli-commands.md index 2cfcad2b278..929e1f9803f 100644 --- a/docs/docs/dev_docs/cli/cli-commands.md +++ b/docs/docs/dev_docs/cli/cli-commands.md @@ -67,7 +67,7 @@ Let's double check that the accounts have been registered with the sandbox using #include_code get-accounts yarn-project/end-to-end/src/cli_docs_sandbox.test.ts bash -You will see a that a number of accounts exist that we did not create. The Sandbox initializes itself with 3 default accounts. Save one of the printed accounts (not the one that you generated above) in an environment variable. We will use it later. +You will see that a number of accounts exist that we did not create. The Sandbox initializes itself with 3 default accounts. Save one of the printed accounts (not the one that you generated above) in an environment variable. We will use it later. ```bash export ADDRESS2= diff --git a/docs/docs/dev_docs/contracts/portals/inbox.md b/docs/docs/dev_docs/contracts/portals/inbox.md index dad45a81583..ea8a3609830 100644 --- a/docs/docs/dev_docs/contracts/portals/inbox.md +++ b/docs/docs/dev_docs/contracts/portals/inbox.md @@ -16,7 +16,7 @@ Sends a message from L1 to L2. | Name | Type | Description | | -------------- | ------- | ----------- | | Recipient | `L2Actor` | The recipient of the message. This **MUST** match the rollup version and an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | -| Deadline | `uint256` | The message consumption deadline. If the message have not been removed from the `Inbox` and included in a rollup block by this point, it can be *cancelled* by the portal (the portal must implement logic to cancel). | +| Deadline | `uint256` | The message consumption deadline. If the message has not been removed from the `Inbox` and included in a rollup block by this point, it can be *canceled* by the portal (the portal must implement logic to cancel). | | Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to be a single field for rollup purposes.
If the content is small enough it can just be passed along, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/master/l1-contracts/src/core/libraries/Hash.sol) utilities with `sha256ToField` functions) | | Secret Hash | `field` (~254 bits) | A hash of a secret that is used when consuming the message on L2. Keep this preimage a secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed) - so make sure your app keeps track of the pre-images! Use the [`computeMessageSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec.js/src/utils/secrets.ts) to compute it from a secret. | | Fee (msg.value) | `uint256` | The fee to the sequencer for including the message. This is the amount of ETH that the sequencer will receive for including the message. Note that only values that can fit in `uint64` will be accepted | @@ -110,4 +110,4 @@ Computes the hash of a message. | Name | Type | Description | | -------------- | ------- | ----------- | | `_message` | `L1ToL2Msg` | The message to compute hash for | -| ReturnValue | `bytes32` | The hash of the message | \ No newline at end of file +| ReturnValue | `bytes32` | The hash of the message | diff --git a/docs/docs/dev_docs/contracts/portals/main.md b/docs/docs/dev_docs/contracts/portals/main.md index e636d99bd63..63273257843 100644 --- a/docs/docs/dev_docs/contracts/portals/main.md +++ b/docs/docs/dev_docs/contracts/portals/main.md @@ -5,7 +5,7 @@ description: Documentation of Aztec's Portals and Cross-chain communication. ## What is a portal -A portal is the point of contact between L1 and a specific contract on Aztec. For applications such as token bridges, this is the point where the tokens are are held on L1 while used in L2. +A portal is the point of contact between L1 and a specific contract on Aztec. For applications such as token bridges, this is the point where the tokens are held on L1 while used in L2. As outlined in the [foundational concepts](../../../concepts/foundation/communication/cross_chain_calls.md), an Aztec L2 contract is linked to _ONE_ L1 address at the time of deployment (specified by the developer). This L1 address is the only address that can send messages to that specific L2 contract, and the only address that can receive messages sent from the L2 contract to L1. Note that a portal doesn't actually need to be a contract; it could be any address on L1. We say that an Aztec contract is attached to a portal. @@ -20,7 +20,7 @@ When sending messages, we need to specify quite a bit of information beyond just | Name | Type | Description | | ----------- | ------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | Recipient | `L2Actor` | The message recipient. This **MUST** match the rollup version and an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | -| Deadline | `uint256` | The deadline for the message to be consumed.
If the message has not been removed from the `Inbox` and included in a rollup block by this point, it can be _cancelled_ by the portal (the portal must implement logic to cancel). | +| Deadline | `uint256` | The deadline for the message to be consumed. If the message has not been removed from the `Inbox` and included in a rollup block by this point, it can be _canceled_ by the portal (the portal must implement logic to cancel). | | Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to be a single field. If the content is small enough it can just be passed along, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/master/l1-contracts/src/core/libraries/Hash.sol) utilities with `sha256ToField` functions) | | Secret Hash | `field` (~254 bits) | A hash of a secret that is used when consuming the message on L2. Keep this preimage a secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed) - so make sure your app keeps track of the pre-images! Use the [`computeMessageSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec.js/src/utils/secrets.ts) to compute it from a secret. | | Fee | `uint64` | The fee to the sequencer for including the message. This is the amount of ETH that the sequencer will receive for including the message. Note that it is not a full `uint256` but only `uint64` | @@ -154,7 +154,7 @@ Error handling for cross chain messages is handled by the application contract a A special type of error is an underpriced transaction - it means that a message is inserted on L1, but the attached fee is too low to be included in a rollup block. -For the case of token bridges, this could lead to funds being locked in the bridge forever, as funds are locked but the message never arrives on L2 to mint the tokens. To address this, the `Inbox` supports cancelling messages after a deadline. However, this must be called by the portal itself, as it will need to "undo" the state changes is made (for example by sending the tokens back to the user). +For the case of token bridges, this could lead to funds being locked in the bridge forever, as funds are locked but the message never arrives on L2 to mint the tokens. To address this, the `Inbox` supports canceling messages after a deadline. However, this must be called by the portal itself, as it will need to "undo" the state changes it made (for example by sending the tokens back to the user). As this requires logic on the portal itself, it is not something that the protocol can enforce. It must be supported by the application builder when building the portal. @@ -189,7 +189,7 @@ bytes memory message = abi.encodeWithSignature( This way, the message can be consumed by the portal contract, but only if the caller is the designated caller. By being a bit clever when specifying the designated caller, we can ensure that the calls are done in the correct order. For the Uniswap example, say that we have token portals implemented as we have done throughout this page, and a Uniswap portal implementing the designated caller. -We require that the Uniswap portal is the caller of the withdraw, and that the uniswap portal implementation is executing the withdraw before the swap.
+We require that the Uniswap portal is the caller of the withdrawal, and that the Uniswap portal implementation is executing the withdrawal before the swap. The order of execution can be constrained in the contract. Since all of the messages are emitted to L1 in the same transaction, we can leverage transaction atomicity to ensure success or failure of all messages. Note that crossing the L1/L2 chasm is asynchronous, so there could be a situation where the user has burned their assets on L2 but the swap fails on L1! This could be due to major price movements or the like. In such a case, the user could be stuck with funds on L1 that they cannot get back to L2 unless the portal contract implements a way to properly handle such errors. diff --git a/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md b/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md index 2f2c0d38074..4ec336a0282 100644 --- a/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md +++ b/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md @@ -114,7 +114,7 @@ This function computes the message hash, and then forwards the call to the more ### Utilities for public calls -Very similar to above, we have variations that work in the public domain. These functions are wrapped to give a similar flow for both cases, but behind the scenes the logic of the account contracts is slightly different since they cannot use the oracle as they are not in the private domain. +Very similar to the above, we have variations that work in the public domain. These functions are wrapped to give a similar flow for both cases, but behind the scenes the logic of the account contracts is slightly different since they cannot use the oracle as they are not in the private domain. #### Example diff --git a/docs/docs/dev_docs/contracts/resources/common_patterns/main.md b/docs/docs/dev_docs/contracts/resources/common_patterns/main.md index a75e128fa6e..9cbbf541f8d 100644 --- a/docs/docs/dev_docs/contracts/resources/common_patterns/main.md +++ b/docs/docs/dev_docs/contracts/resources/common_patterns/main.md @@ -79,7 +79,7 @@ Let's say you have some storage in public and want to move them into the private So you have to create a custom note in the public domain that is not encrypted by some owner - we call such notes a "TransparentNote" since it is created in public, anyone can see the amount and the note is not encrypted by some owner. -This pattern discussed in detail in [writing a token contract section in the shield() method](../../../tutorials/writing_token_contract.md#shield) and [redeem_shield() method](../../../tutorials/writing_token_contract.md#redeem_shield). +This pattern is discussed in detail in [writing a token contract section in the shield() method](../../../tutorials/writing_token_contract.md#shield) and [redeem_shield() method](../../../tutorials/writing_token_contract.md#redeem_shield). ### Discovering my notes When you send someone a note, the note hash gets added to the [note hash tree](../../../../concepts/advanced/data_structures/trees#note-hash-tree). To spend the note, the receiver needs to get the note itself (the note hash preimage).
There are two ways you can get a hold of your notes: diff --git a/docs/docs/dev_docs/contracts/syntax/functions.md b/docs/docs/dev_docs/contracts/syntax/functions.md index 334aaa1401b..8548be6afb6 100644 --- a/docs/docs/dev_docs/contracts/syntax/functions.md +++ b/docs/docs/dev_docs/contracts/syntax/functions.md @@ -286,10 +286,10 @@ The kernel can then check that all of the values passed to each circuit in a fun **Returning the context to the kernel.** #include_code context-example-return /yarn-project/noir-contracts/src/contracts/docs_example_contract/src/main.nr rust -Just as the kernel passes information into the the app circuits, the application must return information about the executed app back to the kernel. This is done through a rigid structure we call the `PrivateCircuitPublicInputs`. +Just as the kernel passes information into the app circuits, the application must return information about the executed app back to the kernel. This is done through a rigid structure we call the `PrivateCircuitPublicInputs`. > _Why is it called the `PrivateCircuitPublicInputs`_ -> It is commonly asked why the return values of a function in a circuit are labelled as the `Public Inputs`. Common intuition from other programming paradigms suggests that the return values and public inputs should be distinct. +> It is commonly asked why the return values of a function in a circuit are labeled as the `Public Inputs`. Common intuition from other programming paradigms suggests that the return values and public inputs should be distinct. > However, in the eyes of the circuit, anything that is publicly viewable (or checkable) is a public input. Hence in this case, the return values are also public inputs. This structure contains a host of information about the executed program. It will contain any newly created nullifiers, any messages to be sent to L2, and most importantly it will contain the actual return values of the function! diff --git a/docs/docs/dev_docs/contracts/syntax/globals.md b/docs/docs/dev_docs/contracts/syntax/globals.md index 1a50ac54066..1778c75e56d 100644 --- a/docs/docs/dev_docs/contracts/syntax/globals.md +++ b/docs/docs/dev_docs/contracts/syntax/globals.md @@ -39,7 +39,7 @@ context.timestamp(); ``` ### Block Number -The block number is an sequential identifier that labels each individual block of the network. This value will be the block number of the block the accessing transaction is included in. +The block number is a sequential identifier that labels each individual block of the network. This value will be the block number of the block the accessing transaction is included in. The block number of the genesis block will be 1, with the number increasing by 1 for every block after. ```rust @@ -49,4 +49,4 @@ context.block_number(); :::info *Why do the available global variables differ per execution environment?* The global variables are constrained by the proving environment. In the case of public functions, they are executed on a sequencer that will know the timestamp and number of the next block ( as they are the block producer ). In the case of private functions, we cannot be sure which block our transaction will be included in, hence we cannot guarantee values for the timestamp or block number.
-::: \ No newline at end of file +::: diff --git a/docs/docs/dev_docs/contracts/syntax/storage/main.md b/docs/docs/dev_docs/contracts/syntax/storage/main.md index b647cc6ff50..da66458882f 100644 --- a/docs/docs/dev_docs/contracts/syntax/storage/main.md +++ b/docs/docs/dev_docs/contracts/syntax/storage/main.md @@ -286,7 +286,7 @@ As part of the initialization of the `Storage` struct, the `Singleton` is create ### `initialize` -As mention, the Singleton is initialized to create the first note and value. +As mentioned, the Singleton is initialized to create the first note and value. When this function is called, a nullifier of the storage slot is created, preventing this Singleton from being initialized again. If an `owner` is specified, the nullifier will be hashed with the owner's secret key. It's crucial to provide an owner if the Singleton is associated with an account. Initializing it without an owner may inadvertently reveal important information about the owner's intention. @@ -416,7 +416,7 @@ An example of how to use this operation is visible in the `easy_private_state`: This function returns the notes the account has access to. -The kernel circuits are constrained to a maximum number of notes this function can return at a time. Check [here](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec-nr/aztec/src/constants_gen.nr) and look for `MAX_READ_REQUESTS_PER_CALL` for the up-to-date number. +The kernel circuits are constrained to a maximum number of notes this function can return at a time. Check [here](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr) and look for `MAX_READ_REQUESTS_PER_CALL` for the up-to-date number. Because of this limit, we should always consider using the second argument `NoteGetterOptions` to limit the number of notes we need to read and constrain in our programs. This is quite important as every extra call increases the time used to prove the program and we don't want to spend more time than necessary. @@ -430,7 +430,7 @@ Functionally similar to [`get_notes`](#get_notes), but executed unconstrained an #include_code view_notes /yarn-project/aztec-nr/value-note/src/balance_utils.nr rust -There's also a limit on the maximum number of notes that can be returned in one go. To find the current limit, refer to [this file](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec-nr/aztec/src/constants_gen.nr) and look for `MAX_NOTES_PER_PAGE`. +There's also a limit on the maximum number of notes that can be returned in one go. To find the current limit, refer to [this file](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr) and look for `MAX_NOTES_PER_PAGE`. The key distinction is that this method is unconstrained. It does not perform a check to verify if the notes actually exist, which is something the [`get_notes`](#get_notes) method does under the hood. Therefore, it should only be used in an unconstrained contract function. 
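The `get_notes` / `view_notes` distinction above comes down to bounding how many notes a single call may touch: constrained reads are capped by `MAX_READ_REQUESTS_PER_CALL`, and unconstrained views are paged by `MAX_NOTES_PER_PAGE` (both defined in the constants.nr file linked above). A minimal C++ sketch of that capping logic follows; the constant values and the `Note` type are hypothetical placeholders for illustration.

```cpp
#include <algorithm>
#include <cstddef>
#include <vector>

// Hypothetical values; the real limits live in constants.nr.
constexpr std::size_t MAX_READ_REQUESTS_PER_CALL = 32;
constexpr std::size_t MAX_NOTES_PER_PAGE = 10;

struct Note { int value; };

// Constrained read: never return more notes than one call can prove against.
std::vector<Note> get_notes_bounded(const std::vector<Note>& all, std::size_t requested)
{
    std::size_t n = std::min({ all.size(), requested, MAX_READ_REQUESTS_PER_CALL });
    return { all.begin(), all.begin() + static_cast<std::ptrdiff_t>(n) };
}

// Unconstrained view: return one page at a time.
std::vector<Note> view_notes_page(const std::vector<Note>& all, std::size_t page)
{
    std::size_t first = std::min(all.size(), page * MAX_NOTES_PER_PAGE);
    std::size_t last = std::min(all.size(), first + MAX_NOTES_PER_PAGE);
    return { all.begin() + static_cast<std::ptrdiff_t>(first),
             all.begin() + static_cast<std::ptrdiff_t>(last) };
}
```

The point is only the bounding: in aztec.nr the constrained path additionally proves each returned note exists, which is exactly why it must stay within the read-request limit, while the unconstrained path only needs the page cap.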
diff --git a/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md b/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md index 17e49df55ca..351b17bd100 100644 --- a/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md +++ b/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md @@ -53,7 +53,7 @@ Where the `map_slot` is the slot specified in `Storage::init`, recall: #include_code storage_balances_init yarn-project/noir-contracts/src/contracts/token_contract/src/main.nr rust -And `to` is the actor who receives the note, `amount` of the note and `randomness` is the randomness used to make the note hiding. Without the `randomness` the note could could just as well be plaintext (computational cost of a preimage attack would be trivial in such a case). +And `to` is the actor who receives the note, `amount` is the amount of the note, and `randomness` is the randomness used to make the note hiding. Without the `randomness` the note could just as well be plaintext (computational cost of a preimage attack would be trivial in such a case). :::info Beware that this hash computation is what the aztec.nr library is doing, and not strictly required by the network (only the kernel computation is). @@ -61,4 +61,4 @@ Beware that this hash computation is what the aztec.nr library is doing, and not With this note structure, the contract can require that only notes sitting at specific storage slots can be used by specific operations, e.g., if transferring funds from `from` to `to`, the notes to destroy should be linked to `H(map_slot, from)` and the new notes (except the change-note) should be linked to `H(map_slot, to)`. -That way, we can have logical storage slots, without them really existing. This means that knowing the storage slot for a note is not enough to actually figure out what is in there (whereas it would be for looking up public state). \ No newline at end of file +That way, we can have logical storage slots, without them really existing. This means that knowing the storage slot for a note is not enough to actually figure out what is in there (whereas it would be for looking up public state). diff --git a/docs/docs/dev_docs/debugging/sandbox-errors.md b/docs/docs/dev_docs/debugging/sandbox-errors.md index c7c2879eef3..f95925a4da2 100644 --- a/docs/docs/dev_docs/debugging/sandbox-errors.md +++ b/docs/docs/dev_docs/debugging/sandbox-errors.md @@ -160,7 +160,7 @@ Circuits work by having a fixed size array.
As such, we have limits on how many - too many transient read requests in one tx - too many transient read request membership witnesses in one tx -You can have a look at our current constants/limitations in [constants.nr](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec-nr/aztec/src/constants_gen.nr) +You can have a look at our current constants/limitations in [constants.nr](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr) #### 7008 - MEMBERSHIP_CHECK_FAILED diff --git a/docs/docs/dev_docs/limitations/main.md b/docs/docs/dev_docs/limitations/main.md index c697898a6f7..bc28d3fd480 100644 --- a/docs/docs/dev_docs/limitations/main.md +++ b/docs/docs/dev_docs/limitations/main.md @@ -189,7 +189,7 @@ Due to the rigidity of zk-SNARK circuits, there are upper bounds on the amount o Here are the current constants: -#include_code constants /yarn-project/aztec-nr/aztec/src/constants_gen.nr rust +#include_code constants /yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr rust #### What are the consequences? diff --git a/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md b/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md index a541e3abf5d..24ba1d18515 100644 --- a/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md +++ b/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md @@ -75,7 +75,7 @@ We are using various utils within the Aztec library: * `state_vars::{ map::Map, public_state::PublicState, }` - we will use a Map to store the votes (key = voteId, value = number of votes), and PublicState to hold our public values that we mentioned earlier * `types::type_serialization::{..}` - various serialization methods for defining how to use these types * `types::address::{AztecAddress},` - our admin will be held as an address -* `constants_gen::EMPTY_NULLIFIED_COMMITMENT,` - this will come in useful when creating our nullifier +* `constants::EMPTY_NULLIFIED_COMMITMENT,` - this will come in useful when creating our nullifier ## Set up storage diff --git a/docs/docs/dev_docs/tutorials/writing_token_contract.md b/docs/docs/dev_docs/tutorials/writing_token_contract.md index 43f6f1e4b3d..e15d6daf548 100644 --- a/docs/docs/dev_docs/tutorials/writing_token_contract.md +++ b/docs/docs/dev_docs/tutorials/writing_token_contract.md @@ -210,7 +210,7 @@ Just below the contract definition, add the following imports: #include_code imports /yarn-project/noir-contracts/src/contracts/token_contract/src/main.nr rust -We are importing the Option type, items from the `value_note` library to help manage private value storage, note utilities, context (for managing private and public execution contexts), `state_vars` for helping manage state, `types` for data manipulation and `oracle` for help passing data from the private to public execution context. We also import the `auth` [library](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/aztec-nr/aztec/src/auth.nr) to handle token authorizations from [Account Contracts](../../concepts/foundation/accounts/main). Check out the Account Contract with AuthWitness [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/noir-contracts/src/contracts/schnorr_single_key_account_contract/src/main.nr). 
+We are importing the Option type, note utilities, context (for managing private and public execution contexts), `state_vars` for helping manage state, `compute_selector` for helping with calling public functions from private functions, and `types` for data manipulation. We also import the `authwit` [library](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/aztec-nr/aztec/src/auth.nr) to handle token authorizations from [Account Contracts](../../concepts/foundation/accounts/main). Check out the Account Contract with AuthWitness [here](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/noir-contracts/src/contracts/schnorr_single_key_account_contract/src/main.nr). [SafeU120](https://github.com/AztecProtocol/aztec-packages/blob/#include_aztec_version/yarn-project/aztec-nr/safe-math/src/safe_u120.nr) is a library to do safe math operations on unsigned integers that protects against overflows and underflows. @@ -313,7 +313,7 @@ First, storage is initialized. Then it checks whether the calling contract (`con ##### Authorizing token spends -If the `msg_sender` is **NOT** the same as the account to debit from, the function checks that the account has authorized the `msg_sender` contract to debit tokens on its behalf. This check is done by computing the function selector that needs to be authorized (in this case, the `shield` function), computing the hash of the message that the account contract has approved. This is a hash of the contract that is approved to spend (`context.msg_sender`), the token contract that can be spent from (`context.this_address()`), the `selector`, the account to spend from (`from.address`), the `amount`, the `secret_hash` and a `nonce` to prevent multiple spends. This hash is passed to `assert_valid_public_message_for` to ensure that the Account Contract has approved tokens to be spent on it's behalf. +If the `msg_sender` is **NOT** the same as the account to debit from, the function checks that the account has authorized the `msg_sender` contract to debit tokens on its behalf. This check is done by computing the function selector that needs to be authorized (in this case, the `shield` function), then computing the hash of the message that the account contract has approved. This is a hash of the contract that is approved to spend (`context.msg_sender`), the token contract that can be spent from (`context.this_address()`), the `selector`, the account to spend from (`from.address`), the `amount`, the `secret_hash` and a `nonce` to prevent multiple spends. This hash is passed to `assert_current_call_valid_authwit_public` to ensure that the Account Contract has approved tokens to be spent on its behalf. If the `msg_sender` is the same as the account to debit tokens from, the authorization check is bypassed and the function proceeds to update the account's `public_balance` and adds a new `TransparentNote` to the `pending_shields`. @@ -364,7 +374,7 @@ The function returns `1` to indicate successful execution. This private function enables un-shielding of private `ValueNote`s stored in `balances` to any Aztec account's `public_balance`. -After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_valid_message_for` is modified to work specifically in the private context.
After the authorization check, the sender's private balance is decreased using the `decrement` helper function for the `value_note` library. Then it stages a public function call on this contract ([`_increase_public_balance`](#_increase_public_balance)) to be executed in the [public execution phase](#execution-contexts) of transaction execution. `_increase_public_balance` is marked as an `internal` function, so can only be called by this token contract. +After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_current_call_valid_authwit` is modified to work specifically in the private context. After the authorization check, the sender's private balance is decreased using the `decrement` helper function for the `value_note` library. Then it stages a public function call on this contract ([`_increase_public_balance`](#_increase_public_balance)) to be executed in the [public execution phase](#execution-contexts) of transaction execution. `_increase_public_balance` is marked as an `internal` function, so can only be called by this token contract. The function returns `1` to indicate successful execution. @@ -374,7 +374,7 @@ The function returns `1` to indicate successful execution. This private function enables private token transfers between Aztec accounts. -After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_valid_message_for` is modified to work specifically in the private context. After authorization, the function gets the current balances for the sender and recipient and decrements and increments them, respectively, using the `value_note` helper functions. +After initializing storage, the function checks that the `msg_sender` is authorized to spend tokens. See [the Authorizing token spends section](#authorizing-token-spends) above for more detail--the only difference being that `assert_current_call_valid_authwit` is modified to work specifically in the private context. After authorization, the function gets the current balances for the sender and recipient and decrements and increments them, respectively, using the `value_note` helper functions. #include_code transfer /yarn-project/noir-contracts/src/contracts/token_contract/src/main.nr rust diff --git a/docs/scripts/build.sh b/docs/scripts/build.sh index e06ae5f2aa9..eb63a05887d 100755 --- a/docs/scripts/build.sh +++ b/docs/scripts/build.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eo pipefail # Helper function for building packages in yarn project @@ -10,7 +10,7 @@ build_package() { (cd "yarn-project/$package_name" && $build_command) } -# Build script. If run on Netlify, first it needs to compile all yarn-projects +# Build script. If run on Netlify, first it needs to compile all yarn-projects # that are involved in typedoc in order to generate their type information. 
if [ -n "$NETLIFY" ]; then # Move to project root diff --git a/iac/mainnet-fork/scripts/run_nginx_anvil.sh b/iac/mainnet-fork/scripts/run_nginx_anvil.sh index 4b701f13256..38788424ed7 100755 --- a/iac/mainnet-fork/scripts/run_nginx_anvil.sh +++ b/iac/mainnet-fork/scripts/run_nginx_anvil.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eum pipefail diff --git a/iac/mainnet-fork/scripts/wait_for_fork b/iac/mainnet-fork/scripts/wait_for_fork index 326582c25af..831e06723e2 100755 --- a/iac/mainnet-fork/scripts/wait_for_fork +++ b/iac/mainnet-fork/scripts/wait_for_fork @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # When destroying and applying mainnet fork terraform, it may not be diff --git a/l1-contracts/Dockerfile b/l1-contracts/Dockerfile index 73eda3294b3..5544b0eaa7b 100644 --- a/l1-contracts/Dockerfile +++ b/l1-contracts/Dockerfile @@ -1,5 +1,5 @@ # Linting requires node. -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add --no-cache build-base git python3 curl bash jq WORKDIR /usr/src/l1-contracts COPY . . diff --git a/l1-contracts/bootstrap.sh b/l1-contracts/bootstrap.sh index db2a71b8c75..f776b6072dc 100755 --- a/l1-contracts/bootstrap.sh +++ b/l1-contracts/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/l1-contracts/scripts/ci_deploy_contracts.sh b/l1-contracts/scripts/ci_deploy_contracts.sh index 23be39b18bb..aaedb6f2f7d 100755 --- a/l1-contracts/scripts/ci_deploy_contracts.sh +++ b/l1-contracts/scripts/ci_deploy_contracts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash export ETHEREUM_HOST=https://$DEPLOY_TAG-mainnet-fork.aztec.network:8545/$FORK_API_KEY @@ -26,7 +26,7 @@ docker run \ ./scripts/deploy_contracts.sh # Write the contract addresses as terraform variables -for KEY in ROLLUP_CONTRACT_ADDRESS REGISTRY_CONTRACT_ADDRESS INBOX_CONTRACT_ADDRESS OUTBOX_CONTRACT_ADDRESS; do +for KEY in ROLLUP_CONTRACT_ADDRESS REGISTRY_CONTRACT_ADDRESS INBOX_CONTRACT_ADDRESS OUTBOX_CONTRACT_ADDRESS CONTRACT_DEPLOYMENT_EMITTER_ADDRESS; do VALUE=$(jq -r .$KEY ./serve/contract_addresses.json) export TF_VAR_$KEY=$VALUE done diff --git a/l1-contracts/scripts/deploy_contracts.sh b/l1-contracts/scripts/deploy_contracts.sh index 6223a0719a9..0bd743e1567 100755 --- a/l1-contracts/scripts/deploy_contracts.sh +++ b/l1-contracts/scripts/deploy_contracts.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Sets up defaults then runs the E2E Setup script to perform contract deployments diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 436e18e84b2..e033fc779a5 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -56,13 +56,15 @@ library Constants { uint256 internal constant NOTE_HASH_SUBTREE_HEIGHT = 7; uint256 internal constant NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH = 25; uint256 internal constant NULLIFIER_SUBTREE_HEIGHT = 7; - uint256 internal constant BLOCKS_TREE_HEIGHT = 16; + uint256 internal constant ARCHIVE_HEIGHT = 16; uint256 internal constant NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH = 13; uint256 internal constant L1_TO_L2_MSG_SUBTREE_HEIGHT = 4; uint256 internal constant L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH = 12; uint256 internal constant FUNCTION_SELECTOR_NUM_BYTES = 4; uint256 internal constant MAPPING_SLOT_PEDERSEN_SEPARATOR = 4; uint256 internal constant NUM_FIELDS_PER_SHA256 = 2; + uint256 internal constant ARGS_HASH_CHUNK_LENGTH = 32; + uint256 
internal constant ARGS_HASH_CHUNK_COUNT = 16; uint256 internal constant L1_TO_L2_MESSAGE_LENGTH = 8; uint256 internal constant L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH = 26; uint256 internal constant MAX_NOTE_FIELDS_LENGTH = 20; diff --git a/l1-contracts/src/core/libraries/Decoder.sol b/l1-contracts/src/core/libraries/Decoder.sol index 110522190b1..c1c0e4a828e 100644 --- a/l1-contracts/src/core/libraries/Decoder.sol +++ b/l1-contracts/src/core/libraries/Decoder.sol @@ -35,8 +35,8 @@ import {Hash} from "./Hash.sol"; * | 0x00ec | 0x20 | startPublicDataTreeRoot * | 0x010c | 0x20 | startL1ToL2MessageTreeSnapshot.root * | 0x012c | 0x04 | startL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x0130 | 0x20 | startBlocksTreeSnapshot.root - * | 0x0150 | 0x04 | startBlocksTreeSnapshot.nextAvailableLeafIndex + * | 0x0130 | 0x20 | startArchiveSnapshot.root + * | 0x0150 | 0x04 | startArchiveSnapshot.nextAvailableLeafIndex * | 0x0154 | 0x20 | endNoteHashTreeSnapshot.root * | 0x0174 | 0x04 | endNoteHashTreeSnapshot.nextAvailableLeafIndex * | 0x0178 | 0x20 | endNullifierTreeSnapshot.root @@ -46,8 +46,8 @@ import {Hash} from "./Hash.sol"; * | 0x01c0 | 0x20 | endPublicDataTreeRoot * | 0x01e0 | 0x20 | endL1ToL2MessageTreeSnapshot.root * | 0x0200 | 0x04 | endL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x0204 | 0x20 | endBlocksTreeSnapshot.root - * | 0x0224 | 0x04 | endBlocksTreeSnapshot.nextAvailableLeafIndex + * | 0x0204 | 0x20 | endArchiveSnapshot.root + * | 0x0224 | 0x04 | endArchiveSnapshot.nextAvailableLeafIndex * | 0x0228 | 0x04 | len(newCommitments) (denoted a) * | 0x022c | a * 0x20 | newCommitments * | 0x022c + a * 0x20 | 0x04 | len(newNullifiers) (denoted b) diff --git a/noir/.github/actions/docs/build-status/script.sh b/noir/.github/actions/docs/build-status/script.sh index 0b282557cf2..2e86de6c173 100755 --- a/noir/.github/actions/docs/build-status/script.sh +++ b/noir/.github/actions/docs/build-status/script.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash BRANCH_NAME=$(echo "$BRANCH_NAME" | sed -e "s#refs/[^/]*/##") DEPLOY_STATUS=$(curl -X GET "https://api.netlify.com/api/v1/sites/$SITE_ID/deploys?branch=$BRANCH_NAME" | jq -r '.[] | select(.created_at != null) | .state' | head -1) diff --git a/noir/Dockerfile b/noir/Dockerfile index ac818cb8bd2..de63983db3e 100644 --- a/noir/Dockerfile +++ b/noir/Dockerfile @@ -1,15 +1,10 @@ -FROM rust:alpine3.17 -RUN apk update \ - && apk upgrade \ - && apk add --no-cache \ - build-base \ - bash +FROM rust:bookworm WORKDIR /usr/src/noir COPY . . RUN ./scripts/bootstrap_native.sh -# When running the container, mount the current working directory to /project. 
-FROM alpine:3.17 +# When running the container, mount the user's home directory to /root +FROM ubuntu:lunar COPY --from=0 /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo -WORKDIR /project +WORKDIR /root ENTRYPOINT ["/usr/src/noir/target/release/nargo"] \ No newline at end of file diff --git a/noir/bootstrap.sh b/noir/bootstrap.sh index bf672ac0ad2..5ebe7ade090 100755 --- a/noir/bootstrap.sh +++ b/noir/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0") diff --git a/noir/compiler/integration-tests/scripts/codegen-verifiers.sh b/noir/compiler/integration-tests/scripts/codegen-verifiers.sh index 13667038728..4518141fc13 100644 --- a/noir/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/noir/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash self_path=$(dirname "$(readlink -f "$0")") diff --git a/noir/scripts/bootstrap_native.sh b/noir/scripts/bootstrap_native.sh index 693a9d9678e..b5e004106ff 100755 --- a/noir/scripts/bootstrap_native.sh +++ b/noir/scripts/bootstrap_native.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/noir/scripts/bootstrap_packages.sh b/noir/scripts/bootstrap_packages.sh index 5fce2675037..1363acf51a6 100755 --- a/noir/scripts/bootstrap_packages.sh +++ b/noir/scripts/bootstrap_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/noir/scripts/install_wasm-bindgen.sh b/noir/scripts/install_wasm-bindgen.sh index 5e9f9127506..c6e85bac50b 100755 --- a/noir/scripts/install_wasm-bindgen.sh +++ b/noir/scripts/install_wasm-bindgen.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/noir/test_programs/gates_report.sh b/noir/test_programs/gates_report.sh index e06e6812e9d..4192c581376 100755 --- a/noir/test_programs/gates_report.sh +++ b/noir/test_programs/gates_report.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # These tests are incompatible with gas reporting diff --git a/noir/test_programs/rebuild.sh b/noir/test_programs/rebuild.sh index dfc3dc5c967..fd3e4478d62 100755 --- a/noir/test_programs/rebuild.sh +++ b/noir/test_programs/rebuild.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e process_dir() { diff --git a/scripts/ci/assemble_e2e_benchmark.sh b/scripts/ci/assemble_e2e_benchmark.sh index 7456d4e1cbc..4a7b1bbe015 100755 --- a/scripts/ci/assemble_e2e_benchmark.sh +++ b/scripts/ci/assemble_e2e_benchmark.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Grabs the log files uploaded in yarn-project/end-to-end/scripts/upload_logs_to_s3.sh # that contain representative benchmarks, extracts whatever metrics are interesting, # and assembles a single file that shows the current state of the repository.
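For readers who want the gist of this CI script without following the shell, here is a rough TypeScript sketch of the "extract whatever metrics are interesting" step. The one-JSON-object-per-line log shape and the metric names are assumptions for illustration, not the actual benchmark log format:

```typescript
import { readFileSync } from 'fs';

// Assumed shape: one JSON object per line, e.g. {"name":"l2_block_time_ms","value":123}.
interface MetricEntry {
  name: string;
  value: number;
}

// Average repeated samples of each metric into one representative number per metric.
function aggregateMetrics(path: string): Record<string, number> {
  const sums = new Map<string, { total: number; count: number }>();
  for (const line of readFileSync(path, 'utf8').split('\n').filter(Boolean)) {
    const { name, value } = JSON.parse(line) as MetricEntry;
    const entry = sums.get(name) ?? { total: 0, count: 0 };
    entry.total += value;
    entry.count += 1;
    sums.set(name, entry);
  }
  return Object.fromEntries([...sums].map(([name, { total, count }]) => [name, total / count]));
}
```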
@@ -70,7 +70,7 @@ if [ -n "${BENCHMARK_LATEST_FILE:-}" ]; then aws s3 cp $BENCHMARK_FILE_JSON "s3://${BUCKET_NAME}/${BENCHMARK_LATEST_FILE}" fi -# If on a pull request, get the data from the most recent commit on master where it's available, +# If on a pull request, get the data from the most recent commit on master where it's available, # generate a markdown comment, and post it on the pull request if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then MASTER_COMMIT_HASH=$(curl -s "https://api.github.com/repos/AztecProtocol/aztec-packages/pulls/${CIRCLE_PULL_REQUEST##*/}" | jq -r '.base.sha') @@ -88,11 +88,9 @@ if [ -n "${CIRCLE_PULL_REQUEST:-}" ]; then done set -e - if [ -z "${BASE_COMMIT_HASH:-}" ]; then + if [ -z "${BASE_COMMIT_HASH:-}" ]; then echo "No base commit data found" fi (yarn-project/scripts/run_script.sh workspace @aztec/scripts bench-comment && echo "commented on pr $CIRCLE_PULL_REQUEST") || echo "failed commenting on pr" fi - - diff --git a/scripts/ci/store_test_benchmark_logs b/scripts/ci/store_test_benchmark_logs index 3554aa746d2..c9ae07fc96a 100755 --- a/scripts/ci/store_test_benchmark_logs +++ b/scripts/ci/store_test_benchmark_logs @@ -1,5 +1,5 @@ -#!/bin/bash -# Script for storing barretenberg benchmark results. +#!/usr/bin/env bash +# Script for storing barretenberg benchmark results. # Uploads results to the AztecProtocol/benchmark-archive repository. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace @@ -36,7 +36,7 @@ cat /tmp/csv/new.csv \ # If there actually were any logs, update the information in the benchmark repository if [ -s /tmp/csv/trimmed.csv ]; then cd /tmp - + git clone --depth 1 https://$AZTEC_GITHUB_TOKEN:@github.com/AztecProtocol/benchmark-archive cd benchmark-archive diff --git a/scripts/git-subrepo/note/init-test b/scripts/git-subrepo/note/init-test index a20854de88a..810dae14a4d 100755 --- a/scripts/git-subrepo/note/init-test +++ b/scripts/git-subrepo/note/init-test @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -ex cat $0 # Show this script in the output diff --git a/scripts/git-subrepo/note/recreate-rebase-conflict.sh b/scripts/git-subrepo/note/recreate-rebase-conflict.sh index 8e1103e575d..6dc0a51e300 100644 --- a/scripts/git-subrepo/note/recreate-rebase-conflict.sh +++ b/scripts/git-subrepo/note/recreate-rebase-conflict.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu set -x diff --git a/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash b/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash index df8b818cee6..9118236848a 100755 --- a/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash +++ b/scripts/git-subrepo/note/subtree-rebase-fail-example/test.bash @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -ex @@ -24,6 +24,6 @@ git clone repo1 repo3 git subrepo clone ../repo2 subrepo bash git rebase -i HEAD^ - git log -p + git log -p ls ) diff --git a/scripts/git-subrepo/note/test-subrepo-push.sh b/scripts/git-subrepo/note/test-subrepo-push.sh index afceb5efa92..13b76e47fc8 100644 --- a/scripts/git-subrepo/note/test-subrepo-push.sh +++ b/scripts/git-subrepo/note/test-subrepo-push.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -ex diff --git a/scripts/git-subrepo/note/test.sh b/scripts/git-subrepo/note/test.sh index fae278795d1..d27548f0937 100755 --- a/scripts/git-subrepo/note/test.sh +++ b/scripts/git-subrepo/note/test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -x diff --git a/scripts/git_subrepo.sh b/scripts/git_subrepo.sh index df9743a2bd5..969789cdcc8 100755 
--- a/scripts/git_subrepo.sh +++ b/scripts/git_subrepo.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu SCRIPT_DIR=$(dirname "$(realpath "$0")") @@ -11,10 +11,10 @@ fi # git subrepo is quite nice, but has one flaw in our workflow: # We frequently squash commits in PRs, and we might update the .gitrepo file -# with a parent commit that later does not exist. +# with a parent commit that later does not exist. # A backup heuristic is used to later find the squashed commit's parent -# using the .gitrepo file's git history. This might be brittle -# in the face of e.g. a .gitrepo whitespace change, but it's a fallback, +# using the .gitrepo file's git history. This might be brittle +# in the face of e.g. a .gitrepo whitespace change, but it's a fallback, # we only have this issue in master, and the file should only be edited # generally by subrepo commands. SUBREPO_PATH="${2:-}" diff --git a/scripts/migrate_barretenberg_branch.sh b/scripts/migrate_barretenberg_branch.sh index 8cf396867c3..36c128a6232 100755 --- a/scripts/migrate_barretenberg_branch.sh +++ b/scripts/migrate_barretenberg_branch.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu # Usage: ./this.sh diff --git a/scripts/tmux-splits b/scripts/tmux-splits index 296cdb36632..6b1e28f8559 100755 --- a/scripts/tmux-splits +++ b/scripts/tmux-splits @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Launches tmux with 1 window that has 2 panes running, from top to bottom: # - aztec.js # - a shell in end-to-end. If a test is given as an argument, the test will be run here automatically. diff --git a/yarn-project/.dockerignore b/yarn-project/.dockerignore index c227696aa75..f913d69056b 100644 --- a/yarn-project/.dockerignore +++ b/yarn-project/.dockerignore @@ -7,11 +7,10 @@ */data* **/dest -**/*.tsbuildinfo -**/Dockerfile* **/node_modules -Dockerfile* +**/Dockerfile* +**/*.tsbuildinfo noir-contracts/src/types noir-contracts/src/artifacts -noir-contracts/target \ No newline at end of file +noir-contracts/target diff --git a/yarn-project/.yarnrc.yml b/yarn-project/.yarnrc.yml index 843129bf681..3491bc0f4b5 100644 --- a/yarn-project/.yarnrc.yml +++ b/yarn-project/.yarnrc.yml @@ -5,3 +5,7 @@ plugins: spec: '@yarnpkg/plugin-workspace-tools' yarnPath: .yarn/releases/yarn-3.6.3.cjs + +logFilters: + - code: YN0013 + level: discard diff --git a/yarn-project/Dockerfile b/yarn-project/Dockerfile index 99f3e771900..696c60fbe1c 100644 --- a/yarn-project/Dockerfile +++ b/yarn-project/Dockerfile @@ -6,15 +6,13 @@ # Any subsequent build steps needed to support downstream containers should be done in those containers build files. FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-base as builder -RUN apk add bash perl - # Copy in the entire workspace. COPY . . 
-RUN yarn workspace @aztec/foundation build && \ - yarn workspace @aztec/noir-compiler build && \ - yarn workspace @aztec/noir-contracts noir:build:all && \ - yarn workspace @aztec/noir-protocol-circuits noir:build && \ - yarn tsc -b +RUN yarn workspace @aztec/foundation build +RUN yarn workspace @aztec/noir-compiler build +RUN yarn workspace @aztec/noir-contracts noir:build:all +RUN yarn workspace @aztec/noir-protocol-circuits noir:build +RUN yarn tsc -b ENTRYPOINT ["yarn"] diff --git a/yarn-project/Dockerfile.prod b/yarn-project/Dockerfile.prod index 82c21c1755c..16f65db4740 100644 --- a/yarn-project/Dockerfile.prod +++ b/yarn-project/Dockerfile.prod @@ -3,8 +3,8 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project AS yarn-project # Need new arch specific image. -FROM node:18-alpine AS builder -RUN apk add bash jq --no-cache +FROM node:18.19.0 AS builder +RUN apt update && apt install -y jq && rm -rf /var/lib/apt/lists/* && apt-get clean COPY --from=yarn-project /usr/src /usr/src WORKDIR /usr/src/yarn-project ARG COMMIT_TAG="" @@ -18,5 +18,32 @@ RUN yarn workspaces focus @aztec/cli @aztec/aztec-sandbox @aztec/aztec-faucet -- RUN rm -rf /usr/src/noir/target # Create fresh minimal size image. -FROM node:18-alpine -COPY --from=builder /usr/src /usr/src \ No newline at end of file +# Installs our specific version of node, stripping out the unnecessary. +# We could probably just apt install nodejs, but it's both a different version, and seemingly a bit slower. +# We could also use distroless, to get us about 20mb off, but meh. It's actually useful to shell into containers. +#FROM gcr.io/distroless/nodejs18-debian12 +FROM ubuntu:lunar +# RUN apt update && apt install -y nodejs && rm -rf /var/lib/apt/lists/* && apt-get clean +RUN apt update && apt install -y curl && rm -rf /var/lib/apt/lists/* && apt-get clean +ENV NODE_VERSION=18.19.0 +RUN ARCH= && \ + dpkgArch="$(dpkg --print-architecture)" && \ + case "${dpkgArch##*-}" in \ + amd64) ARCH='x64';; \ + arm64) ARCH='arm64';; \ + *) echo "unsupported architecture"; exit 1 ;; \ + esac && \ + curl -fsSLO --compressed "https://nodejs.org/dist/v$NODE_VERSION/node-v$NODE_VERSION-linux-$ARCH.tar.gz" && \ + tar zxf "node-v$NODE_VERSION-linux-$ARCH.tar.gz" -C /usr --strip-components=1 --no-same-owner \ + --exclude "*/share/*" \ + --exclude "*/bin/corepack" \ + --exclude "*/bin/npx" \ + --exclude "*/bin/npm" \ + --exclude "*/corepack/*" \ + --exclude "*/npm/man/*" \ + --exclude "*/npm/docs/*" \ + --exclude "*/include/*" && \ + rm "node-v$NODE_VERSION-linux-$ARCH.tar.gz" && \ + node --version +COPY --from=builder /usr/src /usr/src +ENTRYPOINT ["/usr/bin/node"] \ No newline at end of file diff --git a/yarn-project/acir-simulator/src/acvm/serialize.ts b/yarn-project/acir-simulator/src/acvm/serialize.ts index abf78f63c1b..afb88c62fc2 100644 --- a/yarn-project/acir-simulator/src/acvm/serialize.ts +++ b/yarn-project/acir-simulator/src/acvm/serialize.ts @@ -111,7 +111,7 @@ export function toACVMBlockHeader(blockHeader: BlockHeader): ACVMField[] { toACVMField(blockHeader.nullifierTreeRoot), toACVMField(blockHeader.contractTreeRoot), toACVMField(blockHeader.l1ToL2MessagesTreeRoot), - toACVMField(blockHeader.blocksTreeRoot), + toACVMField(blockHeader.archiveRoot), toACVMField(blockHeader.publicDataTreeRoot), toACVMField(blockHeader.globalVariablesHash), ]; diff --git a/yarn-project/acir-simulator/src/client/view_data_oracle.ts b/yarn-project/acir-simulator/src/client/view_data_oracle.ts index f744e54dd6c..8a1c2258737 100644 --- 
a/yarn-project/acir-simulator/src/client/view_data_oracle.ts +++ b/yarn-project/acir-simulator/src/client/view_data_oracle.ts @@ -106,7 +106,7 @@ export class ViewDataOracle extends TypedOracle { block.endNullifierTreeSnapshot.root, block.endContractTreeSnapshot.root, block.endL1ToL2MessagesTreeSnapshot.root, - block.endBlocksTreeSnapshot.root, + block.endArchiveSnapshot.root, new Fr(0), // TODO(#3441) privateKernelVkTreeRoot is not present in L2Block and it's not yet populated in noir block.endPublicDataTreeRoot, computeGlobalsHash(block.globalVariables), diff --git a/yarn-project/aztec-node/src/aztec-node/db.ts b/yarn-project/aztec-node/src/aztec-node/db.ts index 35ebf1b86b6..9b5be428781 100644 --- a/yarn-project/aztec-node/src/aztec-node/db.ts +++ b/yarn-project/aztec-node/src/aztec-node/db.ts @@ -12,8 +12,8 @@ import { AztecNodeConfig } from './config.js'; export const createMemDown = () => (memdown as any)() as MemDown<any, any>; export const createLevelDown = (path: string) => (leveldown as any)(path) as LevelDown; -const DB_SUBDIR = 'aztec-node'; -const WORLD_STATE_SUBDIR = 'aztec-world-state'; +const DB_SUBDIR = 'aztec-node-db'; +const WORLD_STATE_SUBDIR = 'aztec-world-state-db'; const NODE_METADATA_KEY = '@@aztec_node_metadata'; /** diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 8ec02fe8668..18f3b5e4865 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -1,6 +1,6 @@ import { Archiver, LMDBArchiverStore } from '@aztec/archiver'; import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, BlockHeader, CONTRACT_TREE_HEIGHT, Fr, @@ -8,6 +8,7 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + NullifierLeafPreimage, PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/circuits.js'; import { computeGlobalsHash, computePublicDataTreeIndex } from '@aztec/circuits.js/abis'; @@ -391,12 +392,12 @@ export class AztecNodeService implements AztecNode { * @param leafIndex - Index of the leaf in the tree. * @returns The sibling path. */ - public async getBlocksTreeSiblingPath( + public async getArchiveSiblingPath( blockNumber: number | 'latest', leafIndex: bigint, - ): Promise<SiblingPath<typeof BLOCKS_TREE_HEIGHT>> { + ): Promise<SiblingPath<typeof ARCHIVE_HEIGHT>> { const committedDb = await this.#getWorldState(blockNumber); - return committedDb.getSiblingPath(MerkleTreeId.BLOCKS_TREE, leafIndex); + return committedDb.getSiblingPath(MerkleTreeId.ARCHIVE, leafIndex); } /** @@ -429,19 +430,19 @@ export class AztecNodeService implements AztecNode { return undefined; } - const leafDataPromise = db.getLeafData(MerkleTreeId.NULLIFIER_TREE, Number(index)); + const leafPreimagePromise = db.getLeafPreimage(MerkleTreeId.NULLIFIER_TREE, index); const siblingPathPromise = db.getSiblingPath( MerkleTreeId.NULLIFIER_TREE, BigInt(index), ); - const [leafData, siblingPath] = await Promise.all([leafDataPromise, siblingPathPromise]); + const [leafPreimage, siblingPath] = await Promise.all([leafPreimagePromise, siblingPathPromise]); - if (!leafData) { + if (!leafPreimage) { return undefined; } - return new NullifierMembershipWitness(BigInt(index), leafData, siblingPath); + return new NullifierMembershipWitness(BigInt(index), leafPreimage as NullifierLeafPreimage, siblingPath); } /** @@ -463,22 +464,21 @@ export class AztecNodeService implements AztecNode { nullifier: Fr, ): Promise<NullifierMembershipWitness | undefined> { const committedDb = await this.#getWorldState(blockNumber); - const { index, alreadyPresent } = await committedDb.getPreviousValueIndex( - MerkleTreeId.NULLIFIER_TREE, - nullifier.toBigInt(), - ); + const findResult = await committedDb.getPreviousValueIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBigInt()); + if (!findResult) { + return undefined; + } + const { index, alreadyPresent } = findResult; if (alreadyPresent) { this.log.warn(`Nullifier ${nullifier.toBigInt()} already exists in the tree`); } - const leafData = await committedDb.getLeafData(MerkleTreeId.NULLIFIER_TREE, index); - if (!leafData) { - return undefined; - } + const preimageData = (await committedDb.getLeafPreimage(MerkleTreeId.NULLIFIER_TREE, index))!; + const siblingPath = await committedDb.getSiblingPath( MerkleTreeId.NULLIFIER_TREE, BigInt(index), ); - return new NullifierMembershipWitness(BigInt(index), leafData, siblingPath); + return new NullifierMembershipWitness(BigInt(index), preimageData as NullifierLeafPreimage, siblingPath); } /** @@ -506,15 +506,14 @@ export class AztecNodeService implements AztecNode { const committedDb = await this.#getWorldState('latest'); const getTreeRoot = async (id: MerkleTreeId) => Fr.fromBuffer((await committedDb.getTreeInfo(id)).root); - const [noteHashTree, nullifierTree, contractTree, l1ToL2MessagesTree, blocksTree, publicDataTree] = - await Promise.all([ - getTreeRoot(MerkleTreeId.NOTE_HASH_TREE), - getTreeRoot(MerkleTreeId.NULLIFIER_TREE), - getTreeRoot(MerkleTreeId.CONTRACT_TREE), - getTreeRoot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE), - getTreeRoot(MerkleTreeId.BLOCKS_TREE), - getTreeRoot(MerkleTreeId.PUBLIC_DATA_TREE), - ]); + const [noteHashTree, nullifierTree, contractTree, l1ToL2MessagesTree, archive, publicDataTree] = await Promise.all([ + getTreeRoot(MerkleTreeId.NOTE_HASH_TREE), + getTreeRoot(MerkleTreeId.NULLIFIER_TREE), + getTreeRoot(MerkleTreeId.CONTRACT_TREE), + getTreeRoot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE), + getTreeRoot(MerkleTreeId.ARCHIVE), + getTreeRoot(MerkleTreeId.PUBLIC_DATA_TREE), + ]); return { [MerkleTreeId.CONTRACT_TREE]: contractTree, @@ -522,7 +521,7 @@ [MerkleTreeId.NULLIFIER_TREE]: nullifierTree, [MerkleTreeId.PUBLIC_DATA_TREE]:
publicDataTree, [MerkleTreeId.L1_TO_L2_MESSAGES_TREE]: l1ToL2MessagesTree, - [MerkleTreeId.BLOCKS_TREE]: blocksTree, + [MerkleTreeId.ARCHIVE]: archive, }; } @@ -539,7 +538,7 @@ export class AztecNodeService implements AztecNode { roots[MerkleTreeId.NULLIFIER_TREE], roots[MerkleTreeId.CONTRACT_TREE], roots[MerkleTreeId.L1_TO_L2_MESSAGES_TREE], - roots[MerkleTreeId.BLOCKS_TREE], + roots[MerkleTreeId.ARCHIVE], Fr.ZERO, roots[MerkleTreeId.PUBLIC_DATA_TREE], globalsHash, diff --git a/yarn-project/aztec-node/src/bin/index.ts b/yarn-project/aztec-node/src/bin/index.ts index fee7f7808dc..7d265c0f2f6 100644 --- a/yarn-project/aztec-node/src/bin/index.ts +++ b/yarn-project/aztec-node/src/bin/index.ts @@ -2,8 +2,6 @@ import { createDebugLogger } from '@aztec/foundation/log'; import http from 'http'; -import Koa from 'koa'; -import Router from 'koa-router'; import { AztecNodeConfig, AztecNodeService, createAztecNodeRpcServer, getConfigEnvVars } from '../index.js'; @@ -20,19 +18,6 @@ async function createAndDeployAztecNode() { return await AztecNodeService.createAndSync(aztecNodeConfig); } -/** - * Creates a router for helper API endpoints of the Private eXecution Environment (PXE). - * @param apiPrefix - The prefix to use for all api requests - * @returns - The router for handling status requests. - */ -export function createStatusRouter(apiPrefix: string) { - const router = new Router({ prefix: `${apiPrefix}` }); - router.get('/status', (ctx: Koa.Context) => { - ctx.status = 200; - }); - return router; -} - /** * Create and start a new Aztec Node HTTP Server */ @@ -52,9 +37,6 @@ async function main() { const rpcServer = createAztecNodeRpcServer(aztecNode); const app = rpcServer.getApp(API_PREFIX); - const apiRouter = createStatusRouter(API_PREFIX); - app.use(apiRouter.routes()); - app.use(apiRouter.allowedMethods()); const httpServer = http.createServer(app.callback()); httpServer.listen(+AZTEC_NODE_PORT); diff --git a/yarn-project/aztec-node/terraform/main.tf b/yarn-project/aztec-node/terraform/main.tf index 4c5cdcb47df..3ca143b2f33 100644 --- a/yarn-project/aztec-node/terraform/main.tf +++ b/yarn-project/aztec-node/terraform/main.tf @@ -62,6 +62,7 @@ locals { "/dns4/${var.DEPLOY_TAG}-aztec-bootstrap-${i + 1}.local/tcp/${var.BOOTNODE_LISTEN_PORT + i}/p2p/${local.bootnode_ids[i]}" ] combined_bootnodes = join(",", local.bootnodes) + data_dir = "/usr/src/yarn-project/aztec-sandbox/data" } resource "aws_cloudwatch_log_group" "aztec-node-log-group" { @@ -101,9 +102,38 @@ resource "aws_service_discovery_service" "aztec-node" { } } +# Configure an EFS filesystem. 
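Stepping back from the diff for a moment: `getArchiveSiblingPath` and the nullifier-witness methods above all hand callers a leaf index plus a sibling path, and the verifier recomputes the root from them. Below is a minimal TypeScript sketch of that check, with SHA-256 standing in for the protocol's Pedersen hash purely for illustration; the circuit-side equivalent is the `compute_merkle_root` call seen later in `get_block_header.nr`.

```typescript
import { createHash } from 'crypto';

// SHA-256 stands in for the protocol's Pedersen hash; byte layouts are assumptions.
function hashPair(left: Buffer, right: Buffer): Buffer {
  return createHash('sha256').update(Buffer.concat([left, right])).digest();
}

// Fold the leaf up the tree along its sibling path and compare with the expected root.
// Bit i of `index` says whether our node is the left (0) or right (1) child at level i.
function verifySiblingPath(leaf: Buffer, index: bigint, siblingPath: Buffer[], expectedRoot: Buffer): boolean {
  let node = leaf;
  let idx = index;
  for (const sibling of siblingPath) {
    node = (idx & 1n) === 0n ? hashPair(node, sibling) : hashPair(sibling, node);
    idx >>= 1n;
  }
  return node.equals(expectedRoot);
}
```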
+resource "aws_efs_file_system" "node_data_store" { + count = local.node_count + creation_token = "${var.DEPLOY_TAG}-node-${count.index + 1}-data" + throughput_mode = "provisioned" + provisioned_throughput_in_mibps = 20 + + tags = { + Name = "${var.DEPLOY_TAG}-node-data" + } + + lifecycle_policy { + transition_to_ia = "AFTER_14_DAYS" + } +} + +resource "aws_efs_mount_target" "private_az1" { + count = local.node_count + file_system_id = aws_efs_file_system.node_data_store[count.index].id + subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az1_private_id + security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] +} + +resource "aws_efs_mount_target" "private_az2" { + count = local.node_count + file_system_id = aws_efs_file_system.node_data_store[count.index].id + subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az2_private_id + security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] +} + # Define task definitions for each node. resource "aws_ecs_task_definition" "aztec-node" { - # for_each = var.node_keys count = local.node_count family = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" requires_compatibilities = ["FARGATE"] @@ -112,7 +142,15 @@ resource "aws_ecs_task_definition" "aztec-node" { memory = "4096" execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn task_role_arn = data.terraform_remote_state.aztec2_iac.outputs.cloudwatch_logging_ecs_role_arn - container_definitions = <" && exit 1 diff --git a/yarn-project/aztec-nr/.gitrepo b/yarn-project/aztec-nr/.gitrepo index fdb38b0c912..6fc3eca0c44 100644 --- a/yarn-project/aztec-nr/.gitrepo +++ b/yarn-project/aztec-nr/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/aztec-nr branch = master - commit = ff4766aa4136db4cf8981a222061d1e3e91e2604 + commit = a3aa6bd5d8f2ad9d44e3d9340550203fbe38c4d6 method = merge cmdver = 0.4.6 - parent = 9f682cb8cf37eb392c4979f62fdec7126fb4d102 + parent = 9c5443651faaed3dcb9fae36727337a34ce5922b diff --git a/yarn-project/aztec-nr/authwit/Nargo.toml b/yarn-project/aztec-nr/authwit/Nargo.toml index ab4e80b840a..e2259943251 100644 --- a/yarn-project/aztec-nr/authwit/Nargo.toml +++ b/yarn-project/aztec-nr/authwit/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -aztec = { path = "../aztec" } \ No newline at end of file +aztec = { path = "../aztec" } +protocol_types = { path = "../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/aztec-nr/authwit/src/auth.nr b/yarn-project/aztec-nr/authwit/src/auth.nr index 61952330832..bcf2cad5bca 100644 --- a/yarn-project/aztec-nr/authwit/src/auth.nr +++ b/yarn-project/aztec-nr/authwit/src/auth.nr @@ -1,6 +1,6 @@ +use dep::protocol_types::constants::{EMPTY_NULLIFIED_COMMITMENT, GENERATOR_INDEX__SIGNATURE_PAYLOAD}; use dep::aztec::{ context::{PrivateContext, PublicContext, Context}, - constants_gen::{EMPTY_NULLIFIED_COMMITMENT, GENERATOR_INDEX__SIGNATURE_PAYLOAD}, types::address::AztecAddress, abi::hash_args, hash::pedersen_hash, diff --git a/yarn-project/aztec-nr/authwit/src/entrypoint.nr b/yarn-project/aztec-nr/authwit/src/entrypoint.nr index 227be7d17fc..aac0f72f66c 100644 --- a/yarn-project/aztec-nr/authwit/src/entrypoint.nr +++ b/yarn-project/aztec-nr/authwit/src/entrypoint.nr @@ -4,7 +4,7 @@ use dep::aztec::hash::pedersen_hash; use dep::aztec::context::PrivateContext; use 
dep::aztec::private_call_stack_item::PrivateCallStackItem; use dep::aztec::public_call_stack_item::PublicCallStackItem; -use dep::aztec::constants_gen::GENERATOR_INDEX__SIGNATURE_PAYLOAD; +use dep::protocol_types::constants::GENERATOR_INDEX__SIGNATURE_PAYLOAD; global ACCOUNT_MAX_CALLS: Field = 4; // 1 (ARGS_HASH) + 1 (FUNCTION_SELECTOR) + 1 (TARGET_ADDRESS) + 1 (IS_PUBLIC) diff --git a/yarn-project/aztec-nr/aztec/Nargo.toml b/yarn-project/aztec-nr/aztec/Nargo.toml index ab5ef97e2be..8fb5e972533 100644 --- a/yarn-project/aztec-nr/aztec/Nargo.toml +++ b/yarn-project/aztec-nr/aztec/Nargo.toml @@ -1,7 +1,8 @@ [package] name = "noir_aztec" -authors = [""] +authors = ["aztec-labs"] compiler_version = ">=0.18.0" type = "lib" -[dependencies] \ No newline at end of file +[dependencies] +protocol_types = { path = "../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/aztec-nr/aztec/src/abi.nr b/yarn-project/aztec-nr/aztec/src/abi.nr index 88923767db7..b484842dc2d 100644 --- a/yarn-project/aztec-nr/aztec/src/abi.nr +++ b/yarn-project/aztec-nr/aztec/src/abi.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::{ +use dep::protocol_types::constants::{ RETURN_VALUES_LENGTH, MAX_READ_REQUESTS_PER_CALL, MAX_PENDING_READ_REQUESTS_PER_CALL, @@ -147,7 +147,7 @@ struct BlockHeader { nullifier_tree_root : Field, contract_tree_root : Field, l1_to_l2_messages_tree_root : Field, - blocks_tree_root: Field, + archive_root: Field, public_data_tree_root: Field, global_variables_hash: Field, } @@ -161,7 +161,7 @@ impl BlockHeader { self.nullifier_tree_root, self.contract_tree_root, self.l1_to_l2_messages_tree_root, - self.blocks_tree_root, + self.archive_root, self.public_data_tree_root, self.global_variables_hash, ] @@ -173,14 +173,14 @@ impl BlockHeader { nullifier_tree_root: deserialized[1], contract_tree_root: deserialized[2], l1_to_l2_messages_tree_root: deserialized[3], - blocks_tree_root: deserialized[4], + archive_root: deserialized[4], public_data_tree_root: deserialized[5], global_variables_hash: deserialized[6], } } pub fn empty() -> Self { - Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 } + Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 } } pub fn block_hash(self) -> Field { diff --git a/yarn-project/aztec-nr/aztec/src/address.nr b/yarn-project/aztec-nr/aztec/src/address.nr index f1f4224513c..d569f33eea5 100644 --- a/yarn-project/aztec-nr/aztec/src/address.nr +++ b/yarn-project/aztec-nr/aztec/src/address.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::GENERATOR_INDEX__CONTRACT_ADDRESS; +use dep::protocol_types::constants::GENERATOR_INDEX__CONTRACT_ADDRESS; use crate::hash::pedersen_hash; pub fn compute_address(pub_key_x: Field, pub_key_y: Field, partial_address: Field) -> Field { diff --git a/yarn-project/aztec-nr/aztec/src/context.nr b/yarn-project/aztec-nr/aztec/src/context.nr index 720ce9981fb..c7609942d4c 100644 --- a/yarn-project/aztec-nr/aztec/src/context.nr +++ b/yarn-project/aztec-nr/aztec/src/context.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::{ +use dep::protocol_types::constants::{ EMPTY_NULLIFIED_COMMITMENT, MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, @@ -295,7 +295,7 @@ impl PrivateContext { nullifier_tree_root : fields[146], contract_tree_root : fields[147], 
l1_to_l2_messages_tree_root : fields[148], - blocks_tree_root : fields[149], + archive_root : fields[149], public_data_tree_root: fields[150], global_variables_hash: fields[151], }, diff --git a/yarn-project/aztec-nr/aztec/src/hash.nr b/yarn-project/aztec-nr/aztec/src/hash.nr index f8d3abb51aa..eb9fb57aff0 100644 --- a/yarn-project/aztec-nr/aztec/src/hash.nr +++ b/yarn-project/aztec-nr/aztec/src/hash.nr @@ -1,5 +1,5 @@ use dep::std::hash::{pedersen_hash_with_separator, sha256}; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ GENERATOR_INDEX__SIGNATURE_PAYLOAD, GENERATOR_INDEX__L1_TO_L2_MESSAGE_SECRET, }; diff --git a/yarn-project/aztec-nr/aztec/src/lib.nr b/yarn-project/aztec-nr/aztec/src/lib.nr index d1080f9056f..99c087b0075 100644 --- a/yarn-project/aztec-nr/aztec/src/lib.nr +++ b/yarn-project/aztec-nr/aztec/src/lib.nr @@ -1,6 +1,5 @@ mod abi; mod address; -mod constants_gen; mod context; mod hash; mod log; diff --git a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr index 2e2a254577c..0be0fc4b3ac 100644 --- a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr +++ b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::{ +use dep::protocol_types::constants::{ L1_TO_L2_MESSAGE_LENGTH, GENERATOR_INDEX__NULLIFIER, GENERATOR_INDEX__L1_TO_L2_MESSAGE_SECRET, diff --git a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr index 396d758006a..fbf194d76d1 100644 --- a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr +++ b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr @@ -1,5 +1,5 @@ use crate::messaging::l1_to_l2_message::L1ToL2Message; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ L1_TO_L2_MSG_TREE_HEIGHT, L1_TO_L2_MESSAGE_LENGTH, }; diff --git a/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr b/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr index f44d8ab31ec..b36d7e3fc06 100644 --- a/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr +++ b/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr @@ -9,7 +9,7 @@ use crate::note::{ utils::compute_inner_note_hash, }; use crate::oracle::notes::{notify_created_note, notify_nullified_note}; -use crate::constants_gen::EMPTY_NULLIFIED_COMMITMENT; +use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; pub fn create_note( context: &mut PrivateContext, diff --git a/yarn-project/aztec-nr/aztec/src/note/note_getter.nr b/yarn-project/aztec-nr/aztec/src/note/note_getter.nr index 1e5131a22ae..3827e4641e7 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_getter.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_getter.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ MAX_READ_REQUESTS_PER_CALL, GET_NOTE_ORACLE_RETURN_LENGTH, GET_NOTES_ORACLE_RETURN_LENGTH, diff --git a/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr b/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr index c43fd996850..67dabe8593d 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr @@ -1,6 +1,6 @@ use dep::std::option::Option; use crate::types::vec::BoundedVec; -use crate::constants_gen::MAX_READ_REQUESTS_PER_CALL; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; 
struct Select { field_index: u8, diff --git a/yarn-project/aztec-nr/aztec/src/note/note_hash.nr b/yarn-project/aztec-nr/aztec/src/note/note_hash.nr index 4e3618fa650..dc2c6c2c8b3 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_hash.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_hash.nr @@ -1,5 +1,5 @@ use crate::hash::pedersen_hash; -use crate::constants_gen::{GENERATOR_INDEX__UNIQUE_COMMITMENT, GENERATOR_INDEX__SILOED_COMMITMENT}; +use dep::protocol_types::constants::{GENERATOR_INDEX__UNIQUE_COMMITMENT, GENERATOR_INDEX__SILOED_COMMITMENT}; pub fn compute_inner_hash(storage_slot: Field, note_hash: Field) -> Field { // TODO(#1205) Do we need a generator index here? diff --git a/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr b/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr index 709237c4437..15d445d2c02 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::MAX_NOTES_PER_PAGE; +use dep::protocol_types::constants::MAX_NOTES_PER_PAGE; use crate::note::note_getter_options::{Select, Sort}; use crate::types::vec::BoundedVec; diff --git a/yarn-project/aztec-nr/aztec/src/note/utils.nr b/yarn-project/aztec-nr/aztec/src/note/utils.nr index cf40eaeba9b..685c565d538 100644 --- a/yarn-project/aztec-nr/aztec/src/note/utils.nr +++ b/yarn-project/aztec-nr/aztec/src/note/utils.nr @@ -1,5 +1,5 @@ +use dep::protocol_types::constants::GENERATOR_INDEX__OUTER_NULLIFIER; use crate::{ - constants_gen::GENERATOR_INDEX__OUTER_NULLIFIER, note::{ note_hash::{compute_inner_hash, compute_siloed_hash, compute_unique_hash}, note_header::NoteHeader, diff --git a/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr b/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr index a5ab1eb95f3..ac687b1d2a3 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::CALL_PRIVATE_FUNCTION_RETURN_SIZE; +use dep::protocol_types::constants::CALL_PRIVATE_FUNCTION_RETURN_SIZE; #[oracle(callPrivateFunction)] fn call_private_function_oracle(_contract_address: Field, _function_selector: Field, _args_hash: Field) -> [Field; CALL_PRIVATE_FUNCTION_RETURN_SIZE] {} diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr index 5f453221952..9d15d693393 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr @@ -1,10 +1,11 @@ use dep::std::merkle::compute_merkle_root; +use dep::protocol_types::constants::{ + BLOCK_HEADER_LENGTH, + ARCHIVE_HEIGHT, +}; + use crate::{ abi::BlockHeader, - constants_gen::{ - BLOCK_HEADER_LENGTH, - BLOCKS_TREE_HEIGHT, - }, context::PrivateContext, oracle::get_membership_witness::{ get_membership_witness, @@ -27,12 +28,18 @@ pub fn get_block_header(block_number: Field, context: PrivateContext) -> BlockHe // 2) Compute the block hash from the block header let block_hash = block_header.block_hash(); - // 3) Get the membership wintess of the block in the blocks tree - let blocks_tree_id = 5; // TODO(#3443) - let witness: MembershipWitness = get_membership_witness(block_number, blocks_tree_id, block_hash); + // 3) Get the membership witness of the block in the archive + let archive_id = 5; // TODO(#3443) + + // Using `block_number` 
here for path is incorrect and it will break if we pass in an incorrect block number on input. + // Instead here should be the block number corresponding to `context.block_header.blocks_tree_root` + // This is not currently available in private context. See issue #3564 + let path_block_number = block_number; + + let witness: MembershipWitness = get_membership_witness(path_block_number, archive_id, block_hash); - // 4) Check that the block is in the blocks tree (i.e. the witness is valid) - assert(context.block_header.blocks_tree_root == compute_merkle_root(block_hash, witness.index, witness.path), "Proving membership of a block in blocks tree failed"); + // 4) Check that the block is in the archive (i.e. the witness is valid) + assert(context.block_header.archive_root == compute_merkle_root(block_hash, witness.index, witness.path), "Proving membership of a block in archive failed"); // 5) Return the block header block_header diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr index 358511db402..27f6722ae1b 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH; +use dep::protocol_types::constants::L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH; // Checks if a msg is within the l1ToL2Msg tree #[oracle(getL1ToL2Message)] diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr index 5122e685d3e..1f43ae52f57 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::NOTE_HASH_TREE_HEIGHT; +use dep::protocol_types::constants::NOTE_HASH_TREE_HEIGHT; use crate::utils::arr_copy_slice; // Note: We have M here because we need to somehow set it when calling get_membership_witness function and one way to diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr index 64d073d42cb..2cc493d7966 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::NULLIFIER_TREE_HEIGHT; +use dep::protocol_types::constants::NULLIFIER_TREE_HEIGHT; use crate::utils::arr_copy_slice; use crate::hash::pedersen_hash; diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr index 076a4748d69..7fbe0936997 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::NOTE_HASH_TREE_HEIGHT; +use dep::protocol_types::constants::NOTE_HASH_TREE_HEIGHT; use crate::utils::arr_copy_slice; #[oracle(getSiblingPath)] diff --git a/yarn-project/aztec-nr/aztec/src/oracle/logs.nr b/yarn-project/aztec-nr/aztec/src/oracle/logs.nr index caab7b2a13c..6690243c9f5 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/logs.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/logs.nr @@ -1,5 +1,5 @@ use crate::types::point::Point; -use crate::constants_gen::NUM_FIELDS_PER_SHA256; +use dep::protocol_types::constants::NUM_FIELDS_PER_SHA256; // TODO: 
Should take encrypted data. #[oracle(emitEncryptedLog)] diff --git a/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr b/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr index cda65354018..e8190c49dd3 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; #[oracle(callPublicFunction)] fn call_public_function_oracle(_contract_address: Field, _function_selector: Field, _args_hash: Field) -> [Field; RETURN_VALUES_LENGTH] {} diff --git a/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr b/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr index 9742106c152..73fedb091ff 100644 --- a/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr +++ b/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr @@ -1,6 +1,6 @@ use crate::abi::FunctionData; use crate::abi::PrivateCircuitPublicInputs; -use crate::constants_gen::GENERATOR_INDEX__CALL_STACK_ITEM; +use dep::protocol_types::constants::GENERATOR_INDEX__CALL_STACK_ITEM; use crate::hash::pedersen_hash; struct PrivateCallStackItem { diff --git a/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr b/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr index 6c557b9aa57..734d7ea7ee4 100644 --- a/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr +++ b/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr @@ -6,7 +6,7 @@ use crate::{ FunctionData, }, }; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ RETURN_VALUES_LENGTH, GENERATOR_INDEX__CALL_STACK_ITEM, }; diff --git a/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr b/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr index 1084fb5a865..48537a552b9 100644 --- a/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr +++ b/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::EMPTY_NULLIFIED_COMMITMENT; +use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; use crate::context::{PrivateContext, Context}; use crate::note::{ lifecycle::create_note, diff --git a/yarn-project/aztec-nr/aztec/src/state_vars/set.nr b/yarn-project/aztec-nr/aztec/src/state_vars/set.nr index 66a39382f47..1c9a317fb28 100644 --- a/yarn-project/aztec-nr/aztec/src/state_vars/set.nr +++ b/yarn-project/aztec-nr/aztec/src/state_vars/set.nr @@ -1,6 +1,6 @@ use dep::std::option::Option; use crate::abi::PublicContextInputs; -use crate::constants_gen::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; +use dep::protocol_types::constants::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; use crate::context::{PrivateContext, PublicContext, Context}; use crate::note::{ lifecycle::{create_note, create_note_hash_from_public, destroy_note}, diff --git a/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr b/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr index 9f64faf3c82..99ad829f1f5 100644 --- a/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr +++ b/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::{EMPTY_NULLIFIED_COMMITMENT, GENERATOR_INDEX__INITIALIZATION_NULLIFIER}; +use dep::protocol_types::constants::{EMPTY_NULLIFIED_COMMITMENT, GENERATOR_INDEX__INITIALIZATION_NULLIFIER}; use crate::context::{PrivateContext, PublicContext, Context}; use 
crate::note::{ lifecycle::{create_note, destroy_note}, diff --git a/yarn-project/aztec-nr/value-note/Nargo.toml b/yarn-project/aztec-nr/value-note/Nargo.toml index b1f4a276738..7f87db94351 100644 --- a/yarn-project/aztec-nr/value-note/Nargo.toml +++ b/yarn-project/aztec-nr/value-note/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -aztec = { path = "../aztec" } \ No newline at end of file +aztec = { path = "../aztec" } +protocol_types = { path = "../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/aztec-nr/value-note/src/filter.nr b/yarn-project/aztec-nr/value-note/src/filter.nr index 8657358cf10..bd0d3025772 100644 --- a/yarn-project/aztec-nr/value-note/src/filter.nr +++ b/yarn-project/aztec-nr/value-note/src/filter.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use dep::aztec::constants_gen::MAX_READ_REQUESTS_PER_CALL; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use crate::value_note::ValueNote; pub fn filter_notes_min_sum(notes: [Option; MAX_READ_REQUESTS_PER_CALL], min_sum: Field) -> [Option; MAX_READ_REQUESTS_PER_CALL] { diff --git a/yarn-project/aztec-sandbox/Dockerfile b/yarn-project/aztec-sandbox/Dockerfile index 40731ebdcab..afdfb2094e1 100644 --- a/yarn-project/aztec-sandbox/Dockerfile +++ b/yarn-project/aztec-sandbox/Dockerfile @@ -1,5 +1,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod - -WORKDIR /usr/src/yarn-project/aztec-sandbox -ENTRYPOINT ["yarn", "start"] +ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec-sandbox/dest/bin/index.js"] EXPOSE 8079 8080 + +# The version has been updated in yarn-project-prod. +# Adding COMMIT_TAG here to rebuild versioned image. +ARG COMMIT_TAG="" \ No newline at end of file diff --git a/yarn-project/aztec-sandbox/src/bin/index.ts b/yarn-project/aztec-sandbox/src/bin/index.ts index 31dde1ce825..72a9413454d 100644 --- a/yarn-project/aztec-sandbox/src/bin/index.ts +++ b/yarn-project/aztec-sandbox/src/bin/index.ts @@ -2,13 +2,17 @@ import { createAztecNodeRpcServer, getConfigEnvVars as getNodeConfigEnvVars } from '@aztec/aztec-node'; import { AccountManager, createAztecNodeClient, deployInitialSandboxAccounts } from '@aztec/aztec.js'; import { NULL_KEY } from '@aztec/ethereum'; +import { init } from '@aztec/foundation/crypto'; +import { createStatusRouter } from '@aztec/foundation/json-rpc/server'; import { createDebugLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; -import { NoirWasmVersion } from '@aztec/noir-compiler/versions'; +import { NoirCommit } from '@aztec/noir-compiler/versions'; import { BootstrapNode, getP2PConfigEnvVars } from '@aztec/p2p'; import { GrumpkinScalar, PXEService, createPXERpcServer } from '@aztec/pxe'; +import { resolve as dnsResolve } from 'dns'; import { readFileSync } from 'fs'; +import http from 'http'; import { dirname, resolve } from 'path'; import { mnemonicToAccount } from 'viem/accounts'; @@ -27,13 +31,25 @@ enum SandboxMode { P2PBootstrap = 'p2p-bootstrap', } +/** + * If we can successfully resolve 'host.docker.internal', then we are running in a container, and we should treat + * localhost as being host.docker.internal. + */ +function getLocalhost() { + return new Promise(resolve => + dnsResolve('host.docker.internal', err => (err ? 
resolve('localhost') : resolve('host.docker.internal'))), ); } + +const LOCALHOST = await getLocalhost(); const { - AZTEC_NODE_URL = 'http://localhost:8079', + AZTEC_NODE_URL = `http://${LOCALHOST}:8079`, AZTEC_NODE_PORT = 8079, PXE_PORT = 8080, MODE = 'sandbox', TEST_ACCOUNTS = 'true', DEPLOY_AZTEC_CONTRACTS = 'true', + API_PREFIX = '', } = process.env; const logger = createDebugLogger(`aztec:${MODE}`); @@ -76,15 +92,24 @@ async function main() { const mode = MODE as SandboxMode; - const createShutdown = (cb?: () => Promise<void>) => async () => { - logger.info('Shutting down...'); - if (cb) { - await cb(); - } - process.exit(0); + const installSignalHandlers = (cb?: () => Promise<void>) => { + const shutdown = async () => { + logger.info('Shutting down...'); + if (cb) { + await cb(); + } + process.exit(0); + }; + process.removeAllListeners('SIGINT'); + process.removeAllListeners('SIGTERM'); + process.once('SIGINT', shutdown); + process.once('SIGTERM', shutdown); }; - let shutdown: () => Promise<void>; + installSignalHandlers(); + + // Init crypto (bb.js). + await init(); const logStrings = []; @@ -97,12 +122,12 @@ async function main() { // Code path for starting Sandbox if (mode === SandboxMode.Sandbox) { - logger.info(`Setting up Aztec Sandbox v${version} (noir v${NoirWasmVersion}), please stand by...`); + logger.info(`Setting up Aztec Sandbox v${version} (noir ${NoirCommit}), please stand by...`); const { pxe, node, stop, accounts } = await createAndInitialiseSandbox(deployTestAccounts); // Create shutdown cleanup function - shutdown = createShutdown(stop); + installSignalHandlers(stop); // Start Node and PXE JSON-RPC servers startHttpRpcServer(node, createAztecNodeRpcServer, AZTEC_NODE_PORT); @@ -115,7 +140,7 @@ const accountLogStrings = await createAccountLogs(accounts, pxe); logStrings.push(...accountLogStrings); } - logStrings.push(`Aztec Sandbox v${version} (noir v${NoirWasmVersion}) is now ready for use!`); + logStrings.push(`Aztec Sandbox v${version} (noir ${NoirCommit}) is now ready for use!`); } else if (mode === SandboxMode.Node) { // Code path for starting Node only const nodeConfig = getNodeConfigEnvVars(); @@ -131,13 +156,22 @@ } const node = await createAztecNode(nodeConfig); - shutdown = createShutdown(node.stop); + installSignalHandlers(node.stop); + + const port = process.env.AZTEC_NODE_PORT || 8080; // Use standard 8080 when no PXE is running + const nodeRpcServer = createAztecNodeRpcServer(node); + const app = nodeRpcServer.getApp(); + + // Add a /status endpoint + const statusRouter = createStatusRouter(API_PREFIX); + app.use(statusRouter.routes()); + app.use(statusRouter.allowedMethods()); // Start Node JSON-RPC server - startHttpRpcServer(node, createAztecNodeRpcServer, 8080); // Use standard 8080 when no PXE is running - logStrings.push( - `Aztec Node v${version} (noir v${NoirWasmVersion}) is now ready for use in port ${AZTEC_NODE_PORT}!`, - ); + const httpServer = http.createServer(app.callback()); + httpServer.listen(port); + + logStrings.push(`Aztec Node v${version} (noir ${NoirCommit}) is now ready for use in port ${port}!`); } else if (mode === SandboxMode.PXE) { // Code path for starting PXE only @@ -145,7 +179,7 @@ const node = createAztecNodeClient(AZTEC_NODE_URL); const pxe = await createAztecPXE(node); - shutdown = createShutdown(pxe.stop); + installSignalHandlers(pxe.stop); // Start PXE JSON-RPC server startHttpRpcServer(pxe, createPXERpcServer, PXE_PORT); @@ -157,24 +191,20 @@ async function
main() { logStrings.push(...accountLogStrings); } - logStrings.push(`PXE v${version} (noir v${NoirWasmVersion}) is now ready for use in port ${PXE_PORT}!`); + logStrings.push(`PXE v${version} (noir ${NoirCommit}) is now ready for use in port ${PXE_PORT}!`); } else if (mode === SandboxMode.P2PBootstrap) { // Code path for starting a P2P bootstrap node const config = getP2PConfigEnvVars(); const bootstrapNode = new BootstrapNode(logger); await bootstrapNode.start(config); - shutdown = createShutdown(bootstrapNode.stop); + installSignalHandlers(bootstrapNode.stop); logStrings.push( `Bootstrap P2P node is now ready for use. Listening on: ${config.tcpListenIp}:${config.tcpListenPort}.`, ); - } else { - shutdown = createShutdown(); } // Log startup details logger.info(`${splash}\n${github}\n\n`.concat(...logStrings)); - process.once('SIGINT', shutdown); - process.once('SIGTERM', shutdown); } /** diff --git a/yarn-project/aztec.js/package.json b/yarn-project/aztec.js/package.json index 637c4dfed82..007f54fbb1b 100644 --- a/yarn-project/aztec.js/package.json +++ b/yarn-project/aztec.js/package.json @@ -4,9 +4,20 @@ "version": "0.1.0", "type": "module", "exports": { - "node": "./dest/index.js", - "import": "./dest/index.js", - "default": "./dest/main.js" + ".": { + "node": "./dest/index.js", + "import": "./dest/index.js", + "default": "./dest/main.js" + }, + "./interfaces/pxe": "./dest/api/interfaces/pxe.js", + "./abi": "./dest/api/abi.js", + "./aztec_address": "./dest/api/aztec_address.js", + "./eth_address": "./dest/api/eth_address.js", + "./ethereum": "./dest/api/ethereum.js", + "./fields": "./dest/api/fields.js", + "./init": "./dest/api/init.js", + "./log_id": "./dest/api/log_id.js", + "./tx_hash": "./dest/api/tx_hash.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/aztec.js/src/api/README.md b/yarn-project/aztec.js/src/api/README.md new file mode 100644 index 00000000000..473a5bcd826 --- /dev/null +++ b/yarn-project/aztec.js/src/api/README.md @@ -0,0 +1,7 @@ +# API + +This provides a more modular api for importing parts of the library as needed. +The root `index.js` just exposes everything, which can have consequences for startup times and optimizations. +Here we can gradually build up a much more granular api to allow importing precisely what's needed. +This should adopt the opposite philosophy to "export all my child exports". +Every file should (usually) export one thing, and the file/directory structure should be reflected in package.json exports. 
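Concretely, the `exports` map added to package.json above enables subpath imports like the following. Entry names are taken from that hunk; this assumes a consumer on a Node version with `exports` support:

```typescript
// Each subpath maps to one small module, per the exports map above,
// instead of pulling in the whole index barrel.
import { AztecAddress } from '@aztec/aztec.js/aztec_address';
import { EthAddress } from '@aztec/aztec.js/eth_address';
import { Fr } from '@aztec/aztec.js/fields';

console.log(AztecAddress.ZERO.toString(), EthAddress.ZERO.toString(), Fr.ZERO.toString());
```

Smaller import graphs cut startup cost and let bundlers chunk more intelligently, which is exactly the motivation the README above spells out.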
diff --git a/yarn-project/aztec.js/src/api/abi.ts b/yarn-project/aztec.js/src/api/abi.ts new file mode 100644 index 00000000000..d76502b881d --- /dev/null +++ b/yarn-project/aztec.js/src/api/abi.ts @@ -0,0 +1 @@ +export { ContractArtifact, FunctionArtifact, FunctionSelector } from '@aztec/foundation/abi'; diff --git a/yarn-project/aztec.js/src/api/aztec_address.ts b/yarn-project/aztec.js/src/api/aztec_address.ts new file mode 100644 index 00000000000..c6cece77dae --- /dev/null +++ b/yarn-project/aztec.js/src/api/aztec_address.ts @@ -0,0 +1 @@ +export { AztecAddress } from '@aztec/foundation/aztec-address'; diff --git a/yarn-project/aztec.js/src/api/eth_address.ts b/yarn-project/aztec.js/src/api/eth_address.ts new file mode 100644 index 00000000000..f07492245b8 --- /dev/null +++ b/yarn-project/aztec.js/src/api/eth_address.ts @@ -0,0 +1 @@ +export { EthAddress } from '@aztec/foundation/eth-address'; diff --git a/yarn-project/aztec.js/src/api/ethereum.ts b/yarn-project/aztec.js/src/api/ethereum.ts new file mode 100644 index 00000000000..5be2a7ac37d --- /dev/null +++ b/yarn-project/aztec.js/src/api/ethereum.ts @@ -0,0 +1,6 @@ +export { + deployL1Contract, + deployL1Contracts, + DeployL1Contracts, + L1ContractArtifactsForDeployment, +} from '@aztec/ethereum'; diff --git a/yarn-project/aztec.js/src/api/fields.ts b/yarn-project/aztec.js/src/api/fields.ts new file mode 100644 index 00000000000..6f3f255f748 --- /dev/null +++ b/yarn-project/aztec.js/src/api/fields.ts @@ -0,0 +1 @@ +export { Point, Fr, Fq, GrumpkinScalar } from '@aztec/foundation/fields'; diff --git a/yarn-project/aztec.js/src/api/init.ts b/yarn-project/aztec.js/src/api/init.ts new file mode 100644 index 00000000000..9654b9c8042 --- /dev/null +++ b/yarn-project/aztec.js/src/api/init.ts @@ -0,0 +1,10 @@ +import { init } from '@aztec/foundation/crypto'; + +/** + * This should only need to be called in CJS environments that don't have top level await. + * Initializes any asynchronous subsystems required to use the library. + * At time of writing, this is just our foundation crypto lib.
+ */ +export async function initAztecJs() { + await init(); +} diff --git a/yarn-project/aztec.js/src/api/interfaces/pxe.ts b/yarn-project/aztec.js/src/api/interfaces/pxe.ts new file mode 100644 index 00000000000..3dc49f26d71 --- /dev/null +++ b/yarn-project/aztec.js/src/api/interfaces/pxe.ts @@ -0,0 +1 @@ +export { PXE } from '@aztec/types/interfaces'; diff --git a/yarn-project/aztec.js/src/api/log_id.ts b/yarn-project/aztec.js/src/api/log_id.ts new file mode 100644 index 00000000000..6439b39f7d9 --- /dev/null +++ b/yarn-project/aztec.js/src/api/log_id.ts @@ -0,0 +1 @@ +export { LogId } from '@aztec/types/log_id'; diff --git a/yarn-project/aztec.js/src/api/tx_hash.ts b/yarn-project/aztec.js/src/api/tx_hash.ts new file mode 100644 index 00000000000..35ef7f0b387 --- /dev/null +++ b/yarn-project/aztec.js/src/api/tx_hash.ts @@ -0,0 +1 @@ +export { TxHash } from '@aztec/types/tx_hash'; diff --git a/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json b/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json index 491da2af12c..061ae56ba1d 100644 --- a/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json +++ b/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json @@ -395,7 +395,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -662,7 +662,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } diff --git a/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json b/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json index f05dd9664f7..8a0d04a998d 100644 --- a/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json +++ b/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json @@ -383,7 +383,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -650,7 +650,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } diff --git a/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json b/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json index fb155f98ec1..5496e442409 100644 --- a/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json +++ b/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json @@ -318,7 +318,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -585,7 +585,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } diff --git a/yarn-project/aztec.js/src/contract_deployer/index.ts b/yarn-project/aztec.js/src/contract_deployer/index.ts index ef3f20492c4..20add4a47c0 100644 --- a/yarn-project/aztec.js/src/contract_deployer/index.ts +++ b/yarn-project/aztec.js/src/contract_deployer/index.ts @@ -1,2 +1,3 @@ export * from './contract_deployer.js'; export * from './deploy_sent_tx.js'; +export * from './deploy_method.js'; diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index e9e42ae309b..353457cd6f5 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -1,24 +1,83 @@ -export * from './contract/index.js'; -export * from './contract_deployer/index.js'; -export * from './utils/index.js'; -export * from './pxe_client.js'; -export * from './account/index.js'; -export * from './contract_deployer/deploy_method.js'; -export * from './sandbox/index.js'; -export * from './wallet/index.js'; +/** + * This is our 
public api. + * Do NOT "export * from ..." here. + * Everything here should be explicit, to ensure we can clearly see everything we're exposing to the world. + * If it's exposed, people will use it, and then we can't remove/change the api without breaking client code. + * At the time of writing we overexpose things that should only be internal. + * + * TODO: Review and narrow scope of public api. + * We should also consider exposing subsections of the api via package.json exports, like we do with foundation. + * This can allow consumers to import much smaller parts of the library to incur less overhead. + * It will also allow web bundlers to perform intelligent chunking of bundles etc. + * Some work has been done on this within the api folder, providing the alternative import style of e.g.: + * ```typescript + * import { TxHash } from '@aztec/aztec.js/tx_hash'; + * import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/aztec.js/abi'; + * import { AztecAddress } from '@aztec/aztec.js/aztec_address'; + * import { EthAddress } from '@aztec/aztec.js/eth_address'; + * ``` + * + * TODO: Ultimately reimplement this mega exporter by mega exporting a granular api (then deprecate it). + */ +export { + WaitOpts, + ContractFunctionInteraction, + Contract, + ContractBase, + ContractMethod, + SentTx, + BatchCall, +} from './contract/index.js'; + +export { ContractDeployer, DeployMethod, DeploySentTx } from './contract_deployer/index.js'; + +export { + generatePublicKey, + FieldLike, + EthAddressLike, + computeMessageSecretHash, + CheatCodes, + AztecAddressLike, + isContractDeployed, + EthCheatCodes, + computeAuthWitMessageHash, +} from './utils/index.js'; + +export { createPXEClient } from './pxe_client.js'; + +export { + CompleteAddress, + getSchnorrAccount, + AccountContract, + AccountManager, + getUnsafeSchnorrAccount, + EcdsaAccountContract, + createAccounts, + SchnorrAccountContract, + SingleKeyAccountContract, + createAccount, + AuthWitnessProvider, + BaseAccountContract, +} from './account/index.js'; + +export { waitForSandbox, getSandboxAccountsWallets, deployInitialSandboxAccounts } from './sandbox/index.js'; + +export { AccountWalletWithPrivateKey, AccountWallet, Wallet, SignerlessWallet } from './wallet/index.js'; // TODO https://github.com/AztecProtocol/aztec-packages/issues/2632 --> FunctionSelector might not need to be exposed // here once the issue is resolved. export { AztecAddress, EthAddress, - Point, Fr, + Fq, FunctionSelector, GlobalVariables, GrumpkinScalar, + Point, getContractDeploymentInfo, } from '@aztec/circuits.js'; + export { Grumpkin, Schnorr } from '@aztec/circuits.js/barretenberg'; export { @@ -29,20 +88,21 @@ export { ExtendedContractData, ExtendedNote, FunctionCall, - INITIAL_L2_BLOCK_NUM, GrumpkinPrivateKey, + INITIAL_L2_BLOCK_NUM, L2Actor, L2Block, L2BlockL2Logs, LogFilter, + LogId, LogType, MerkleTreeId, NodeInfo, Note, + PXE, PackedArguments, PartialAddress, PublicKey, - PXE, SyncStatus, Tx, TxExecutionRequest, @@ -50,25 +110,35 @@ export { TxReceipt, TxStatus, UnencryptedL2Log, - emptyFunctionCall, createAztecNodeClient, + emptyFunctionCall, merkleTreeIds, mockTx, } from '@aztec/types'; -export { ContractArtifact } from '@aztec/foundation/abi'; +// TODO: These kinds of things have no place on our public api. +// External devs will almost certainly have their own methods of doing these things. +// If we want to use them in our own "aztec.js consuming code", import them from foundation as needed.
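To make the shape of this explicit surface concrete, here is a minimal consumer-side sketch of the api as exported above; it is not part of the diff itself. The sandbox URL and the error-handling details are assumptions for a local sandbox setup.

```typescript
import { createPXEClient, getSandboxAccountsWallets, initAztecJs, waitForSandbox } from '@aztec/aztec.js';

async function main() {
  // initAztecJs (re-exported via ./api/init.js below) initialises the WASM crypto before first use.
  await initAztecJs();
  // Assumed local sandbox URL; substitute your own PXE endpoint.
  const pxe = createPXEClient('http://localhost:8080');
  await waitForSandbox(pxe);
  // Grab one of the pre-deployed sandbox accounts via the sandbox helpers exported above.
  const [wallet] = await getSandboxAccountsWallets(pxe);
  console.log(`Using account ${wallet.getAddress().toString()}`);
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
```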
+export { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; +export { sha256 } from '@aztec/foundation/crypto'; export { DebugLogger, createDebugLogger, onLog } from '@aztec/foundation/log'; -export { fileURLToPath } from '@aztec/foundation/url'; +export { retry, retryUntil } from '@aztec/foundation/retry'; export { sleep } from '@aztec/foundation/sleep'; export { elapsed } from '@aztec/foundation/timer'; -export { retry, retryUntil } from '@aztec/foundation/retry'; -export * from '@aztec/foundation/crypto'; +export { fileURLToPath } from '@aztec/foundation/url'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; +export { makeFetch } from '@aztec/foundation/json-rpc/client'; +export { FieldsOf } from '@aztec/foundation/types'; export { - deployL1Contract, - deployL1Contracts, DeployL1Contracts, L1ContractArtifactsForDeployment, + deployL1Contract, + deployL1Contracts, } from '@aztec/ethereum'; + +// Start of section that exports the public api via the granular api. +// Here you *can* do `export *` as the granular api de facto exports things explicitly. +// This entire index file will be deprecated at some point after we're satisfied. +export * from './api/init.js'; diff --git a/yarn-project/bootstrap.sh b/yarn-project/bootstrap.sh index 6f9acecd7db..ff01dcb5e80 100755 --- a/yarn-project/bootstrap.sh +++ b/yarn-project/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd "$(dirname "$0")" diff --git a/yarn-project/boxes/blank-react/package.json b/yarn-project/boxes/blank-react/package.json index d8d97c6ba6e..3174826d119 100644 --- a/yarn-project/boxes/blank-react/package.json +++ b/yarn-project/boxes/blank-react/package.json @@ -37,9 +37,6 @@ "dependencies": { "@aztec/aztec-ui": "^0.1.14", "@aztec/aztec.js": "workspace:^", - "@aztec/circuits.js": "workspace:^", - "@aztec/foundation": "workspace:^", - "@aztec/types": "workspace:^", "classnames": "^2.3.2", "formik": "^2.4.3", "node-sass": "^9.0.0", diff --git a/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx b/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx index 484a7b10207..66609bd2ab9 100644 --- a/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx +++ b/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx @@ -3,8 +3,7 @@ import { callContractFunction, deployContract, viewContractFunction } from '../.
import { convertArgs } from '../../scripts/util.js'; import styles from './contract_function_form.module.scss'; import { Button, Loader } from '@aztec/aztec-ui'; -import { AztecAddress, CompleteAddress, Fr } from '@aztec/aztec.js'; -import { ContractArtifact, FunctionArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, ContractArtifact, Fr, FunctionArtifact } from '@aztec/aztec.js'; import { useFormik } from 'formik'; import * as Yup from 'yup'; diff --git a/yarn-project/boxes/blank-react/src/app/contract.tsx b/yarn-project/boxes/blank-react/src/app/contract.tsx index 3f1bd526956..6dfbc219589 100644 --- a/yarn-project/boxes/blank-react/src/app/contract.tsx +++ b/yarn-project/boxes/blank-react/src/app/contract.tsx @@ -3,8 +3,7 @@ import { Copy } from './components/copy.js'; import { ContractFunctionForm, Popup } from './components/index.js'; import styles from './contract.module.scss'; import { Button, ButtonSize, ButtonTheme, Card, CardTheme, ImageButton, ImageButtonIcon } from '@aztec/aztec-ui'; -import { AztecAddress, CompleteAddress } from '@aztec/aztec.js'; -import { FunctionArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, FunctionArtifact } from '@aztec/aztec.js'; import { ReactNode, useState } from 'react'; const functionTypeSortOrder = { diff --git a/yarn-project/boxes/blank-react/src/artifacts/Blank.json b/yarn-project/boxes/blank-react/src/artifacts/Blank.json index cf389c951c9..2e06365e481 100644 --- a/yarn-project/boxes/blank-react/src/artifacts/Blank.json +++ b/yarn-project/boxes/blank-react/src/artifacts/Blank.json @@ -40,7 +40,7 @@ "path": "/mnt/user-data/jan/aztec-packages/yarn-project/boxes/blank-react/src/contracts/src/main.nr" }, "35": { - "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, 
self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n blocks_tree_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.blocks_tree_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n 
pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; 
CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash 
= 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", + "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n 
block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n archive_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.archive_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n 
fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n 
inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", "path": "/aztec/abi.nr" }, "36": { diff --git a/yarn-project/boxes/blank-react/src/config.ts b/yarn-project/boxes/blank-react/src/config.ts index f693a4c7fe9..6521f4df01c 100644 --- a/yarn-project/boxes/blank-react/src/config.ts +++ b/yarn-project/boxes/blank-react/src/config.ts @@ -1,6 +1,5 @@ import { BlankContractArtifact } from './artifacts/Blank.js'; -import { PXE, createPXEClient } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { ContractArtifact, PXE, createPXEClient } from '@aztec/aztec.js'; // update this if using a different contract diff --git a/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts 
b/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts index 54e971ff68d..d80037eed84 100644 --- a/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts +++ b/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts @@ -1,7 +1,5 @@ import { getWallet } from './util.js'; -import { AztecAddress, PXE, CompleteAddress, Contract, TxReceipt } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; -import { FieldsOf } from '@aztec/foundation/types'; +import { AztecAddress, CompleteAddress, Contract, ContractArtifact, FieldsOf, PXE, TxReceipt } from '@aztec/aztec.js'; export async function callContractFunction( address: AztecAddress, diff --git a/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts b/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts index 08725e90dd6..8cde9eb5b9d 100644 --- a/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts +++ b/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts @@ -1,6 +1,4 @@ -import { AztecAddress, CompleteAddress, DeployMethod, Fr } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; -import { PXE } from '@aztec/types'; +import { AztecAddress, CompleteAddress, ContractArtifact, DeployMethod, Fr, PXE } from '@aztec/aztec.js'; export async function deployContract( activeWallet: CompleteAddress, diff --git a/yarn-project/boxes/blank-react/src/scripts/util.ts b/yarn-project/boxes/blank-react/src/scripts/util.ts index bea2bcba47f..cde41403d55 100644 --- a/yarn-project/boxes/blank-react/src/scripts/util.ts +++ b/yarn-project/boxes/blank-react/src/scripts/util.ts @@ -1,6 +1,12 @@ -import { AccountWallet, Fr, getSandboxAccountsWallets } from '@aztec/aztec.js'; -import { FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; -import { PXE, CompleteAddress } from '@aztec/types'; +import { + AccountWallet, + CompleteAddress, + Fr, + FunctionArtifact, + PXE, + encodeArguments, + getSandboxAccountsWallets, +} from '@aztec/aztec.js'; export function convertArgs(functionAbi: FunctionArtifact, args: any): Fr[] { const untypedArgs = functionAbi.parameters.map(param => { diff --git a/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts b/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts index 8b3bfd8e901..beff0032c77 100644 --- a/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts +++ b/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts @@ -1,6 +1,5 @@ import { getWallet } from './util.js'; -import { AztecAddress, PXE, CompleteAddress, Contract } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, Contract, ContractArtifact, PXE } from '@aztec/aztec.js'; export async function viewContractFunction( address: AztecAddress, diff --git a/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts b/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts index 6f906bfe35c..62fc7b7d1da 100644 --- a/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts +++ b/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts @@ -9,10 +9,10 @@ import { PXE, TxStatus, Wallet, + createDebugLogger, createPXEClient, waitForSandbox, } from '@aztec/aztec.js'; -import { createDebugLogger } from '@aztec/foundation/log'; const logger = createDebugLogger('aztec:http-pxe-client'); diff --git a/yarn-project/boxes/blank-react/tsconfig.dest.json 
b/yarn-project/boxes/blank-react/tsconfig.dest.json index 1b9c3a4a72e..97c9e8ef0da 100644 --- a/yarn-project/boxes/blank-react/tsconfig.dest.json +++ b/yarn-project/boxes/blank-react/tsconfig.dest.json @@ -1,5 +1,5 @@ { "extends": ".", - "references": [{ "path": "../../aztec.js" }, { "path": "../../foundation" }, { "path": "../../types" }], + "references": [{ "path": "../../aztec.js" }], "exclude": ["src/**/*.test.ts"] } diff --git a/yarn-project/boxes/blank-react/tsconfig.json b/yarn-project/boxes/blank-react/tsconfig.json index 755ee612d17..831cd23a900 100644 --- a/yarn-project/boxes/blank-react/tsconfig.json +++ b/yarn-project/boxes/blank-react/tsconfig.json @@ -26,14 +26,5 @@ { "path": "../../aztec.js" }, - { - "path": "../../circuits.js" - }, - { - "path": "../../foundation" - }, - { - "path": "../../types" - } ] } diff --git a/yarn-project/boxes/blank/package.json b/yarn-project/boxes/blank/package.json index dd7928c6d36..12d5c45576d 100644 --- a/yarn-project/boxes/blank/package.json +++ b/yarn-project/boxes/blank/package.json @@ -37,8 +37,6 @@ "dependencies": { "@aztec/aztec-ui": "^0.1.14", "@aztec/aztec.js": "workspace:^", - "@aztec/circuits.js": "workspace:^", - "@aztec/foundation": "workspace:^", "serve": "^14.2.1" }, "devDependencies": { diff --git a/yarn-project/boxes/blank/src/artifacts/Blank.json b/yarn-project/boxes/blank/src/artifacts/Blank.json index 252ef64e52f..19f23ddde4a 100644 --- a/yarn-project/boxes/blank/src/artifacts/Blank.json +++ b/yarn-project/boxes/blank/src/artifacts/Blank.json @@ -91,7 +91,7 @@ "path": "/mnt/user-data/jan/aztec-packages/yarn-project/boxes/blank/src/contracts/src/main.nr" }, "35": { - "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData 
{\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n blocks_tree_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.blocks_tree_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; 
MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n 
pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; 
ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", + "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// 
docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n archive_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.archive_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n 
fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n 
inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", "path": "/aztec/abi.nr" }, "36": { diff --git a/yarn-project/boxes/blank/src/index.ts b/yarn-project/boxes/blank/src/index.ts index 518e2cfc75b..000a7322f22 100644 --- a/yarn-project/boxes/blank/src/index.ts +++ b/yarn-project/boxes/blank/src/index.ts @@ -5,15 +5,17 @@ import { AztecAddress, CompleteAddress, Contract, + ContractArtifact, DeployMethod, + FieldsOf, Fr, + FunctionArtifact, PXE, TxReceipt, createPXEClient, + encodeArguments, getSandboxAccountsWallets, } from '@aztec/aztec.js'; -import { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; -import { FieldsOf } from '@aztec/foundation/types'; // docs:end:imports diff --git a/yarn-project/boxes/blank/src/tests/blank.contract.test.ts b/yarn-project/boxes/blank/src/tests/blank.contract.test.ts index 7e076b22bda..6fb153315f0 100644 --- 
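
The index.ts hunk above is the crux of the boxes cleanup: `ContractArtifact`, `FieldsOf`, `FunctionArtifact`, and `encodeArguments` now come from the single `@aztec/aztec.js` entrypoint instead of deep `@aztec/foundation/*` paths. A minimal sketch of what downstream box code looks like after the change, assuming only the re-exports visible in the `+` lines (the `encodeCall` helper is illustrative, not part of the box):

```typescript
import { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/aztec.js';

// Hypothetical helper: find a function in a contract artifact and encode its
// arguments using the re-exported encodeArguments.
function encodeCall(artifact: ContractArtifact, name: string, args: any[]) {
  const fn: FunctionArtifact | undefined = artifact.functions.find(f => f.name === name);
  if (!fn) throw new Error(`function ${name} not found in artifact`);
  return encodeArguments(fn, args);
}
```

Because nothing is imported from a deep path anymore, the boxes no longer need `@aztec/foundation`, `@aztec/types`, or `@aztec/circuits.js` as direct dependencies — which is exactly what the tsconfig and package.json hunks that follow remove.
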
a/yarn-project/boxes/blank/src/tests/blank.contract.test.ts +++ b/yarn-project/boxes/blank/src/tests/blank.contract.test.ts @@ -9,10 +9,10 @@ import { PXE, TxStatus, Wallet, + createDebugLogger, createPXEClient, waitForSandbox, } from '@aztec/aztec.js'; -import { createDebugLogger } from '@aztec/foundation/log'; const logger = createDebugLogger('aztec:blank-box-test'); diff --git a/yarn-project/boxes/blank/tsconfig.dest.json b/yarn-project/boxes/blank/tsconfig.dest.json index 1b9c3a4a72e..97c9e8ef0da 100644 --- a/yarn-project/boxes/blank/tsconfig.dest.json +++ b/yarn-project/boxes/blank/tsconfig.dest.json @@ -1,5 +1,5 @@ { "extends": ".", - "references": [{ "path": "../../aztec.js" }, { "path": "../../foundation" }, { "path": "../../types" }], + "references": [{ "path": "../../aztec.js" }], "exclude": ["src/**/*.test.ts"] } diff --git a/yarn-project/boxes/blank/tsconfig.json b/yarn-project/boxes/blank/tsconfig.json index 4973d658b21..831cd23a900 100644 --- a/yarn-project/boxes/blank/tsconfig.json +++ b/yarn-project/boxes/blank/tsconfig.json @@ -26,11 +26,5 @@ { "path": "../../aztec.js" }, - { - "path": "../../circuits.js" - }, - { - "path": "../../foundation" - } ] } diff --git a/yarn-project/boxes/bootstrap.sh b/yarn-project/boxes/bootstrap.sh index 340bc0e8972..9d1b562be36 100755 --- a/yarn-project/boxes/bootstrap.sh +++ b/yarn-project/boxes/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # should be run from yarn-project/boxes original_path=$(pwd) @@ -7,16 +7,16 @@ original_path=$(pwd) for dir in *; do # Check if it's a directory if [ -d "${dir}" ]; then - + cd "${dir}" # Run the compile command echo "Running compile command inside ${dir}..." # Runs ts-node command from "../cli" to use latest "compile" code. this uses the yarn command to use the subpackage ts-node dep yarn compile:local - + # Change back to the original directory cd "${original_path}" - + fi done diff --git a/yarn-project/boxes/run_tests b/yarn-project/boxes/run_tests index 24a089b203c..c358a9366ee 100755 --- a/yarn-project/boxes/run_tests +++ b/yarn-project/boxes/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to run an e2e type test in CI (see .circleci/config.yml). # It pulls images and runs docker-compose, which has the test as the entrypoint. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace diff --git a/yarn-project/boxes/token/package.json b/yarn-project/boxes/token/package.json index d80c4536a6e..81ee3dce74a 100644 --- a/yarn-project/boxes/token/package.json +++ b/yarn-project/boxes/token/package.json @@ -37,9 +37,6 @@ "dependencies": { "@aztec/aztec-ui": "^0.1.14", "@aztec/aztec.js": "workspace:^", - "@aztec/circuits.js": "workspace:^", - "@aztec/foundation": "workspace:^", - "@aztec/types": "workspace:^", "classnames": "^2.3.2", "formik": "^2.4.3", "node-sass": "^9.0.0", diff --git a/yarn-project/boxes/token/src/app/components/contract_function_form.tsx b/yarn-project/boxes/token/src/app/components/contract_function_form.tsx index 01abc409157..3f736a4e667 100644 --- a/yarn-project/boxes/token/src/app/components/contract_function_form.tsx +++ b/yarn-project/boxes/token/src/app/components/contract_function_form.tsx @@ -3,8 +3,7 @@ import { callContractFunction, deployContract, viewContractFunction } from '../. 
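
The same consolidation applies in the test file: `createDebugLogger` moves into the `@aztec/aztec.js` import, leaving `aztec.js` as the box's only workspace dependency once the tsconfig references and package.json entries are pruned. (The `#!/usr/bin/env bash` shebang change in `bootstrap.sh` and `run_tests` is unrelated hygiene: it resolves `bash` via `PATH` rather than hard-coding `/bin/bash`, which matters on systems where bash lives elsewhere.) A sketch of the resulting test setup, assuming the re-exports shown in the hunks above; the URL fallback is an illustrative sandbox default:

```typescript
import { createDebugLogger, createPXEClient, waitForSandbox } from '@aztec/aztec.js';

const logger = createDebugLogger('aztec:blank-box-test');

async function setup() {
  // Connect to the sandbox PXE and block until it responds.
  const pxe = createPXEClient(process.env.PXE_URL ?? 'http://localhost:8080');
  await waitForSandbox(pxe);
  logger('sandbox ready');
  return pxe;
}
```
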
import { convertArgs } from '../../scripts/util.js'; import styles from './contract_function_form.module.scss'; import { Button, Loader } from '@aztec/aztec-ui'; -import { AztecAddress, CompleteAddress, Fr } from '@aztec/aztec.js'; -import { ContractArtifact, FunctionArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, ContractArtifact, Fr, FunctionArtifact } from '@aztec/aztec.js'; import { useFormik } from 'formik'; import * as Yup from 'yup'; diff --git a/yarn-project/boxes/token/src/app/contract.tsx b/yarn-project/boxes/token/src/app/contract.tsx index 3bf292a5b4b..f4a76c2bfbe 100644 --- a/yarn-project/boxes/token/src/app/contract.tsx +++ b/yarn-project/boxes/token/src/app/contract.tsx @@ -3,8 +3,7 @@ import { Copy } from './components/copy.js'; import { ContractFunctionForm, Popup } from './components/index.js'; import styles from './contract.module.scss'; import { Button, ButtonSize, ButtonTheme, Card, CardTheme, ImageButton, ImageButtonIcon } from '@aztec/aztec-ui'; -import { AztecAddress, CompleteAddress } from '@aztec/aztec.js'; -import { FunctionArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, FunctionArtifact } from '@aztec/aztec.js'; import { ReactNode, useState } from 'react'; const functionTypeSortOrder = { diff --git a/yarn-project/boxes/token/src/artifacts/Token.json b/yarn-project/boxes/token/src/artifacts/Token.json index 60623ca669c..65a08e6042d 100644 --- a/yarn-project/boxes/token/src/artifacts/Token.json +++ b/yarn-project/boxes/token/src/artifacts/Token.json @@ -245,7 +245,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -514,7 +514,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -774,7 +774,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -1174,7 +1174,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -1547,7 +1547,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -1823,7 +1823,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -2132,7 +2132,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -2408,7 +2408,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -2698,7 +2698,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -3068,7 +3068,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -3204,7 +3204,7 @@ "path": "std/option.nr" }, "38": { - "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n 
GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n blocks_tree_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n 
[\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.blocks_tree_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n 
fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n 
fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", + "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : 
Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n archive_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.archive_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: 
[Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n 
}\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n 
chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", "path": "/aztec/abi.nr" }, "39": { @@ -3212,7 +3212,7 @@ "path": "/aztec/address.nr" }, "41": { - "source": "use crate::constants_gen::{\n EMPTY_NULLIFIED_COMMITMENT,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n RETURN_VALUES_LENGTH,\n};\n\nuse crate::abi;\n\nuse crate::abi::{\n hash_args,\n CallContext,\n ContractDeploymentData,\n HistoricBlockData,\n FunctionData,\n PrivateCircuitPublicInputs,\n PublicCircuitPublicInputs,\n};\n\n// TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n// use dep::std::collections::vec::Vec;\n\n// l1 to l2 messaging\nuse crate::messaging::process_l1_to_l2_message;\nuse crate::private_call_stack_item::PrivateCallStackItem;\nuse crate::public_call_stack_item::PublicCallStackItem;\n\nuse crate::types::{\n vec::BoundedVec,\n point::Point,\n};\n\nuse crate::utils::arr_copy_slice;\n\nuse crate::oracle::{\n arguments,\n call_private_function::call_private_function_internal,\n public_call::call_public_function_internal,\n enqueue_public_function_call::enqueue_public_function_call_internal,\n context::get_portal_address,\n};\n\nuse dep::std::option::Option;\n\n// When finished, one can call .finish() to convert back to the abi\nstruct PrivateContext {\n // docs:start:private-context\n inputs: abi::PrivateContextInputs,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n read_requests: BoundedVec,\n pending_read_requests: BoundedVec,\n\n new_commitments: BoundedVec,\n new_nullifiers: BoundedVec,\n nullified_commitments: BoundedVec,\n\n private_call_stack : BoundedVec,\n public_call_stack : BoundedVec,\n new_l2_to_l1_msgs : BoundedVec,\n // docs:end:private-context\n\n block_data: HistoricBlockData,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec,\n // unencrypted_logs_preimages: Vec,\n}\n\nimpl PrivateContext {\n pub fn new(inputs: abi::PrivateContextInputs, args_hash: Field) -> PrivateContext {\n PrivateContext {\n inputs: inputs,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n read_requests: BoundedVec::new(0),\n pending_read_requests: BoundedVec::new(0),\n\n new_commitments: BoundedVec::new(0),\n new_nullifiers: BoundedVec::new(0),\n nullified_commitments: BoundedVec::new(0),\n\n block_data: inputs.block_data,\n\n private_call_stack: BoundedVec::new(0),\n public_call_stack: BoundedVec::new(0),\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> Field {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> Field {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> Field {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.private_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n 
self.inputs.private_global_variables.version\n }\n\n pub fn selector(self) -> Field {\n self.inputs.call_context.function_selector\n }\n\n pub fn finish(self) -> abi::PrivateCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let encrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let encrypted_log_preimages_length = 0;\n let unencrypted_log_preimages_length = 0;\n\n let priv_circuit_pub_inputs = abi::PrivateCircuitPublicInputs {\n call_context: self.inputs.call_context,\n args_hash: self.args_hash,\n return_values: self.return_values.storage,\n read_requests: self.read_requests.storage,\n pending_read_requests: self.pending_read_requests.storage,\n new_commitments: self.new_commitments.storage,\n new_nullifiers: self.new_nullifiers.storage,\n nullified_commitments: self.nullified_commitments.storage,\n private_call_stack: self.private_call_stack.storage,\n public_call_stack: self.public_call_stack.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n encrypted_logs_hash: encrypted_logs_hash,\n unencrypted_logs_hash: unencrypted_logs_hash,\n encrypted_log_preimages_length: encrypted_log_preimages_length,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_data: self.block_data,\n contract_deployment_data: self.inputs.contract_deployment_data,\n chain_id: self.inputs.private_global_variables.chain_id,\n version: self.inputs.private_global_variables.version,\n };\n priv_circuit_pub_inputs\n }\n\n pub fn push_read_request(&mut self, read_request: Field) {\n self.read_requests.push(read_request);\n }\n\n pub fn push_pending_read_request(&mut self, pending_read_request: Field) {\n self.pending_read_requests.push(pending_read_request);\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n self.new_commitments.push(note_hash);\n }\n\n // We never push a zero nullified_commitment as zero is used to indicate the end\n // of a field array in private kernel. 
This routine transparently replaces a\n // zero value into the special placeholder: EMPTY_NULLIFIED_COMMITMENT.\n pub fn push_new_nullifier(&mut self, nullifier: Field, nullified_commitment: Field) {\n self.new_nullifiers.push(nullifier);\n let mut non_zero_nullified = nullified_commitment;\n if (non_zero_nullified == 0) {\n non_zero_nullified = EMPTY_NULLIFIED_COMMITMENT;\n }\n self.nullified_commitments.push(non_zero_nullified);\n }\n\n // docs:start:context_message_portal\n pub fn message_portal(&mut self, content: Field) \n // docs:end:context_message_portal\n {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n // docs:start:context_consume_l1_to_l2_message\n // docs:start:consume_l1_to_l2_message\n pub fn consume_l1_to_l2_message(\n &mut self,\n msg_key: Field,\n content: Field,\n secret: Field\n ) \n // docs:end:context_consume_l1_to_l2_message\n {\n let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, self.this_address(), msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)\n }\n // docs:end:consume_l1_to_l2_message\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_private_function(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n args: [Field; ARGS_COUNT]\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_private_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_private_function_no_args(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n ) -> [Field; RETURN_VALUES_LENGTH] {\n self.call_private_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_private_function_with_packed_args(\n &mut self,\n contract_address: Field,\n function_selector: Field,\n args_hash: Field\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let fields = call_private_function_internal(\n contract_address, \n function_selector, \n args_hash\n );\n let item = PrivateCallStackItem {\n contract_address: fields[0],\n function_data: FunctionData {\n function_selector: fields[1],\n is_internal: fields[2] as bool,\n is_private: fields[3] as bool,\n is_constructor: fields[4] as bool,\n },\n public_inputs: PrivateCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : fields[5],\n storage_contract_address : fields[6],\n portal_contract_address : fields[7],\n function_selector: fields[8], // practically same as fields[1]\n is_delegate_call : fields[9] as bool,\n is_static_call : fields[10] as bool,\n is_contract_deployment: fields[11] as bool,\n },\n // TODO handle the offsets as a variable incremented during extraction?\n args_hash: fields[12],\n return_values: arr_copy_slice(fields, [0; RETURN_VALUES_LENGTH], 13),\n read_requests: arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 17),\n pending_read_requests: 
arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 49),\n new_commitments: arr_copy_slice(fields, [0; MAX_NEW_COMMITMENTS_PER_CALL], 81),\n new_nullifiers: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 97),\n nullified_commitments: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 113),\n private_call_stack: arr_copy_slice(fields, [0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], 129),\n public_call_stack: arr_copy_slice(fields, [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], 133),\n new_l2_to_l1_msgs: arr_copy_slice(fields, [0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL], 137),\n encrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 139),\n unencrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 141),\n encrypted_log_preimages_length: fields[143],\n unencrypted_log_preimages_length: fields[144],\n block_data: HistoricBlockData {\n // Must match order in `private_circuit_public_inputs.hpp`\n note_hash_tree_root : fields[145],\n nullifier_tree_root : fields[146],\n contract_tree_root : fields[147],\n l1_to_l2_messages_tree_root : fields[148],\n blocks_tree_root : fields[149],\n public_data_tree_root: fields[150],\n global_variables_hash: fields[151],\n },\n contract_deployment_data: ContractDeploymentData {\n deployer_public_key: Point::new(fields[152], fields[153]),\n constructor_vk_hash : fields[154],\n function_tree_root : fields[155],\n contract_address_salt : fields[156],\n portal_contract_address : fields[157],\n },\n chain_id: fields[158],\n version: fields[159],\n },\n is_execution_request: fields[160] as bool,\n };\n assert(contract_address == item.contract_address);\n assert(function_selector == item.function_data.function_selector);\n\n assert(args_hash == item.public_inputs.args_hash);\n\n assert(item.is_execution_request == false);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. 
We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);\n assert(item.public_inputs.call_context.storage_contract_address == contract_address);\n\n self.private_call_stack.push(item.hash());\n\n item.public_inputs.return_values\n }\n\n pub fn call_public_function(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n args: [Field; ARGS_COUNT]\n ) {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_public_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_public_function_no_args(\n &mut self,\n contract_address: Field, \n function_selector: Field,\n ) {\n self.call_public_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_public_function_with_packed_args(\n &mut self,\n contract_address: Field,\n function_selector: Field,\n args_hash: Field\n ) {\n let fields = enqueue_public_function_call_internal(\n contract_address, \n function_selector, \n args_hash\n );\n let item = PublicCallStackItem {\n contract_address: fields[0],\n function_data: FunctionData {\n function_selector: fields[1],\n is_internal: fields[2] as bool,\n is_private: fields[3] as bool,\n is_constructor: fields[4] as bool,\n },\n public_inputs: PublicCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : fields[5],\n storage_contract_address : fields[6],\n portal_contract_address : fields[7],\n function_selector: fields[8], // practically same as fields[1]\n is_delegate_call : fields[9] as bool,\n is_static_call : fields[10] as bool,\n is_contract_deployment: fields[11] as bool,\n },\n args_hash: fields[12],\n return_values: [0; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [0; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [0; MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs:[0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash:[0; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: 0,\n block_data: HistoricBlockData::empty(),\n prover_address: 0,\n },\n is_execution_request: true,\n };\n\n assert(contract_address == item.contract_address);\n assert(function_selector == item.function_data.function_selector);\n \n assert(args_hash == item.public_inputs.args_hash);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. 
// /aztec/context.nr
use crate::constants_gen::{
    EMPTY_NULLIFIED_COMMITMENT,
    MAX_NEW_COMMITMENTS_PER_CALL,
    MAX_NEW_L2_TO_L1_MSGS_PER_CALL,
    MAX_NEW_NULLIFIERS_PER_CALL,
    MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,
    MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,
    MAX_PUBLIC_DATA_READS_PER_CALL,
    MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,
    MAX_READ_REQUESTS_PER_CALL,
    MAX_PENDING_READ_REQUESTS_PER_CALL,
    NUM_FIELDS_PER_SHA256,
    RETURN_VALUES_LENGTH,
};

use crate::abi;

use crate::abi::{
    hash_args,
    CallContext,
    ContractDeploymentData,
    HistoricBlockData,
    FunctionData,
    PrivateCircuitPublicInputs,
    PublicCircuitPublicInputs,
};

// TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
// use dep::std::collections::vec::Vec;

// l1 to l2 messaging
use crate::messaging::process_l1_to_l2_message;
use crate::private_call_stack_item::PrivateCallStackItem;
use crate::public_call_stack_item::PublicCallStackItem;

use crate::types::{
    vec::BoundedVec,
    point::Point,
};

use crate::utils::arr_copy_slice;

use crate::oracle::{
    arguments,
    call_private_function::call_private_function_internal,
    public_call::call_public_function_internal,
    enqueue_public_function_call::enqueue_public_function_call_internal,
    context::get_portal_address,
};

use dep::std::option::Option;

// When finished, one can call .finish() to convert back to the abi
struct PrivateContext {
    // docs:start:private-context
    inputs: abi::PrivateContextInputs,

    args_hash : Field,
    return_values : BoundedVec<Field, RETURN_VALUES_LENGTH>,

    read_requests: BoundedVec<Field, MAX_READ_REQUESTS_PER_CALL>,
    pending_read_requests: BoundedVec<Field, MAX_PENDING_READ_REQUESTS_PER_CALL>,

    new_commitments: BoundedVec<Field, MAX_NEW_COMMITMENTS_PER_CALL>,
    new_nullifiers: BoundedVec<Field, MAX_NEW_NULLIFIERS_PER_CALL>,
    nullified_commitments: BoundedVec<Field, MAX_NEW_NULLIFIERS_PER_CALL>,

    private_call_stack : BoundedVec<Field, MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL>,
    public_call_stack : BoundedVec<Field, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL>,
    new_l2_to_l1_msgs : BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_CALL>,
    // docs:end:private-context

    block_data: HistoricBlockData,

    // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
    // encrypted_logs_preimages: Vec<Field>,
    // unencrypted_logs_preimages: Vec<Field>,
}

impl PrivateContext {
    pub fn new(inputs: abi::PrivateContextInputs, args_hash: Field) -> PrivateContext {
        PrivateContext {
            inputs: inputs,

            args_hash: args_hash,
            return_values: BoundedVec::new(0),

            read_requests: BoundedVec::new(0),
            pending_read_requests: BoundedVec::new(0),

            new_commitments: BoundedVec::new(0),
            new_nullifiers: BoundedVec::new(0),
            nullified_commitments: BoundedVec::new(0),

            block_data: inputs.block_data,

            private_call_stack: BoundedVec::new(0),
            public_call_stack: BoundedVec::new(0),
            new_l2_to_l1_msgs: BoundedVec::new(0),

            // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
            // encrypted_logs_preimages: Vec::new(),
            // unencrypted_logs_preimages: Vec::new(),
        }
    }

    pub fn msg_sender(self) -> Field {
        self.inputs.call_context.msg_sender
    }

    pub fn this_address(self) -> Field {
        self.inputs.call_context.storage_contract_address
    }

    pub fn this_portal_address(self) -> Field {
        self.inputs.call_context.portal_contract_address
    }

    pub fn chain_id(self) -> Field {
        self.inputs.private_global_variables.chain_id
    }

    pub fn version(self) -> Field {
        self.inputs.private_global_variables.version
    }

    pub fn selector(self) -> Field {
        self.inputs.call_context.function_selector
    }

    pub fn finish(self) -> abi::PrivateCircuitPublicInputs {
        // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
        let encrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];
        let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];
        let encrypted_log_preimages_length = 0;
        let unencrypted_log_preimages_length = 0;

        let priv_circuit_pub_inputs = abi::PrivateCircuitPublicInputs {
            call_context: self.inputs.call_context,
            args_hash: self.args_hash,
            return_values: self.return_values.storage,
            read_requests: self.read_requests.storage,
            pending_read_requests: self.pending_read_requests.storage,
            new_commitments: self.new_commitments.storage,
            new_nullifiers: self.new_nullifiers.storage,
            nullified_commitments: self.nullified_commitments.storage,
            private_call_stack: self.private_call_stack.storage,
            public_call_stack: self.public_call_stack.storage,
            new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,
            encrypted_logs_hash: encrypted_logs_hash,
            unencrypted_logs_hash: unencrypted_logs_hash,
            encrypted_log_preimages_length: encrypted_log_preimages_length,
            unencrypted_log_preimages_length: unencrypted_log_preimages_length,
            block_data: self.block_data,
            contract_deployment_data: self.inputs.contract_deployment_data,
            chain_id: self.inputs.private_global_variables.chain_id,
            version: self.inputs.private_global_variables.version,
        };
        priv_circuit_pub_inputs
    }

    pub fn push_read_request(&mut self, read_request: Field) {
        self.read_requests.push(read_request);
    }

    pub fn push_pending_read_request(&mut self, pending_read_request: Field) {
        self.pending_read_requests.push(pending_read_request);
    }

    pub fn push_new_note_hash(&mut self, note_hash: Field) {
        self.new_commitments.push(note_hash);
    }

    // We never push a zero nullified_commitment as zero is used to indicate the end
    // of a field array in private kernel. This routine transparently replaces a
    // zero value into the special placeholder: EMPTY_NULLIFIED_COMMITMENT.
    pub fn push_new_nullifier(&mut self, nullifier: Field, nullified_commitment: Field) {
        self.new_nullifiers.push(nullifier);
        let mut non_zero_nullified = nullified_commitment;
        if (non_zero_nullified == 0) {
            non_zero_nullified = EMPTY_NULLIFIED_COMMITMENT;
        }
        self.nullified_commitments.push(non_zero_nullified);
    }

    // docs:start:context_message_portal
    pub fn message_portal(&mut self, content: Field)
    // docs:end:context_message_portal
    {
        self.new_l2_to_l1_msgs.push(content);
    }

    // PrivateContextInputs must be temporarily passed in to prevent too many unknowns
    // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned
    // docs:start:context_consume_l1_to_l2_message
    // docs:start:consume_l1_to_l2_message
    pub fn consume_l1_to_l2_message(
        &mut self,
        msg_key: Field,
        content: Field,
        secret: Field
    )
    // docs:end:context_consume_l1_to_l2_message
    {
        let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, self.this_address(), msg_key, content, secret);

        // Push nullifier (and the "commitment" corresponding to this can be "empty")
        self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)
    }
    // docs:end:consume_l1_to_l2_message

    pub fn accumulate_encrypted_logs<N>(&mut self, log: [Field; N]) {
        let _void1 = self.inputs;
        let _void2 = log;
        // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
    }

    pub fn accumulate_unencrypted_logs<T>(&mut self, log: T) {
        let _void1 = self.inputs;
        let _void2 = log;
        // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
    }

    pub fn call_private_function<ARGS_COUNT>(
        &mut self,
        contract_address: Field,
        function_selector: Field,
        args: [Field; ARGS_COUNT]
    ) -> [Field; RETURN_VALUES_LENGTH] {
        let args_hash = hash_args(args);
        assert(args_hash == arguments::pack_arguments(args));
        self.call_private_function_with_packed_args(contract_address, function_selector, args_hash)
    }

    pub fn call_private_function_no_args(
        &mut self,
        contract_address: Field,
        function_selector: Field,
    ) -> [Field; RETURN_VALUES_LENGTH] {
        self.call_private_function_with_packed_args(contract_address, function_selector, 0)
    }

    pub fn call_private_function_with_packed_args(
        &mut self,
        contract_address: Field,
        function_selector: Field,
        args_hash: Field
    ) -> [Field; RETURN_VALUES_LENGTH] {
        let fields = call_private_function_internal(
            contract_address,
            function_selector,
            args_hash
        );
        let item = PrivateCallStackItem {
            contract_address: fields[0],
            function_data: FunctionData {
                function_selector: fields[1],
                is_internal: fields[2] as bool,
                is_private: fields[3] as bool,
                is_constructor: fields[4] as bool,
            },
            public_inputs: PrivateCircuitPublicInputs {
                call_context: CallContext {
                    msg_sender : fields[5],
                    storage_contract_address : fields[6],
                    portal_contract_address : fields[7],
                    function_selector: fields[8], // practically same as fields[1]
                    is_delegate_call : fields[9] as bool,
                    is_static_call : fields[10] as bool,
                    is_contract_deployment: fields[11] as bool,
                },
                // TODO handle the offsets as a variable incremented during extraction?
                args_hash: fields[12],
                return_values: arr_copy_slice(fields, [0; RETURN_VALUES_LENGTH], 13),
                read_requests: arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 17),
                pending_read_requests: arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 49),
                new_commitments: arr_copy_slice(fields, [0; MAX_NEW_COMMITMENTS_PER_CALL], 81),
                new_nullifiers: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 97),
                nullified_commitments: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 113),
                private_call_stack: arr_copy_slice(fields, [0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], 129),
                public_call_stack: arr_copy_slice(fields, [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], 133),
                new_l2_to_l1_msgs: arr_copy_slice(fields, [0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL], 137),
                encrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 139),
                unencrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 141),
                encrypted_log_preimages_length: fields[143],
                unencrypted_log_preimages_length: fields[144],
                block_data: HistoricBlockData {
                    // Must match order in `private_circuit_public_inputs.hpp`
                    note_hash_tree_root : fields[145],
                    nullifier_tree_root : fields[146],
                    contract_tree_root : fields[147],
                    l1_to_l2_messages_tree_root : fields[148],
                    archive_root : fields[149],
                    public_data_tree_root: fields[150],
                    global_variables_hash: fields[151],
                },
                contract_deployment_data: ContractDeploymentData {
                    deployer_public_key: Point::new(fields[152], fields[153]),
                    constructor_vk_hash : fields[154],
                    function_tree_root : fields[155],
                    contract_address_salt : fields[156],
                    portal_contract_address : fields[157],
                },
                chain_id: fields[158],
                version: fields[159],
            },
            is_execution_request: fields[160] as bool,
        };
        assert(contract_address == item.contract_address);
        assert(function_selector == item.function_data.function_selector);

        assert(args_hash == item.public_inputs.args_hash);

        assert(item.is_execution_request == false);

        // Assert that the call context of the enqueued call generated by the oracle matches our request.
        // We are issuing a regular call which is not delegate, static, or deployment. We also constrain
        // the msg_sender in the nested call to be equal to our address, and the execution context address
        // for the nested call to be equal to the address we actually called.
        assert(item.public_inputs.call_context.is_delegate_call == false);
        assert(item.public_inputs.call_context.is_static_call == false);
        assert(item.public_inputs.call_context.is_contract_deployment == false);
        assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);
        assert(item.public_inputs.call_context.storage_contract_address == contract_address);

        self.private_call_stack.push(item.hash());

        item.public_inputs.return_values
    }

    pub fn call_public_function<ARGS_COUNT>(
        &mut self,
        contract_address: Field,
        function_selector: Field,
        args: [Field; ARGS_COUNT]
    ) {
        let args_hash = hash_args(args);
        assert(args_hash == arguments::pack_arguments(args));
        self.call_public_function_with_packed_args(contract_address, function_selector, args_hash)
    }

    pub fn call_public_function_no_args(
        &mut self,
        contract_address: Field,
        function_selector: Field,
    ) {
        self.call_public_function_with_packed_args(contract_address, function_selector, 0)
    }

    pub fn call_public_function_with_packed_args(
        &mut self,
        contract_address: Field,
        function_selector: Field,
        args_hash: Field
    ) {
        let fields = enqueue_public_function_call_internal(
            contract_address,
            function_selector,
            args_hash
        );
        let item = PublicCallStackItem {
            contract_address: fields[0],
            function_data: FunctionData {
                function_selector: fields[1],
                is_internal: fields[2] as bool,
                is_private: fields[3] as bool,
                is_constructor: fields[4] as bool,
            },
            public_inputs: PublicCircuitPublicInputs {
                call_context: CallContext {
                    msg_sender : fields[5],
                    storage_contract_address : fields[6],
                    portal_contract_address : fields[7],
                    function_selector: fields[8], // practically same as fields[1]
                    is_delegate_call : fields[9] as bool,
                    is_static_call : fields[10] as bool,
                    is_contract_deployment: fields[11] as bool,
                },
                args_hash: fields[12],
                return_values: [0; RETURN_VALUES_LENGTH],
                contract_storage_update_requests: [ContractStorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],
                contract_storage_read: [ContractStorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL],
                public_call_stack: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],
                new_commitments: [0; MAX_NEW_COMMITMENTS_PER_CALL],
                new_nullifiers: [0; MAX_NEW_NULLIFIERS_PER_CALL],
                new_l2_to_l1_msgs: [0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],
                unencrypted_logs_hash: [0; NUM_FIELDS_PER_SHA256],
                unencrypted_log_preimages_length: 0,
                block_data: HistoricBlockData::empty(),
                prover_address: 0,
            },
            is_execution_request: true,
        };

        assert(contract_address == item.contract_address);
        assert(function_selector == item.function_data.function_selector);

        assert(args_hash == item.public_inputs.args_hash);

        // Assert that the call context of the enqueued call generated by the oracle matches our request.
        // We are issuing a regular call which is not delegate, static, or deployment. We also constrain
        // the msg_sender in the nested call to be equal to our address, and the execution context address
        // for the nested call to be equal to the address we actually called.
        assert(item.public_inputs.call_context.is_delegate_call == false);
        assert(item.public_inputs.call_context.is_static_call == false);
        assert(item.public_inputs.call_context.is_contract_deployment == false);
        assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);
        assert(item.public_inputs.call_context.storage_contract_address == contract_address);

        self.public_call_stack.push(item.hash());
    }
}
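// Illustrative usage sketch (hypothetical contract function, not part of this
// file): how a contract drives the PrivateContext API above when claiming a
// cross-chain message. `note_commitment` stands in for a commitment the caller
// has already computed.
//
//     fn claim(context: &mut PrivateContext, msg_key: Field, content: Field, secret: Field, note_commitment: Field) {
//         // Nullifies the L1-to-L2 message so it cannot be consumed twice
//         // (see consume_l1_to_l2_message above).
//         context.consume_l1_to_l2_message(msg_key, content, secret);
//         // Records the note commitment minted in exchange for the message.
//         context.push_new_note_hash(note_commitment);
//     }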
use crate::abi::{
    ContractStorageRead,
    ContractStorageUpdateRequest
};

struct PublicContext {
    inputs: abi::PublicContextInputs,

    args_hash : Field,
    return_values : BoundedVec<Field, RETURN_VALUES_LENGTH>,

    contract_storage_update_requests: BoundedVec<ContractStorageUpdateRequest, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL>,
    contract_storage_read: BoundedVec<ContractStorageRead, MAX_PUBLIC_DATA_READS_PER_CALL>,
    public_call_stack: BoundedVec<Field, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL>,

    new_commitments: BoundedVec<Field, MAX_NEW_COMMITMENTS_PER_CALL>,
    new_nullifiers: BoundedVec<Field, MAX_NEW_NULLIFIERS_PER_CALL>,

    new_l2_to_l1_msgs: BoundedVec<Field, MAX_NEW_L2_TO_L1_MSGS_PER_CALL>,

    unencrypted_logs_hash: BoundedVec<Field, NUM_FIELDS_PER_SHA256>,
    unencrypted_logs_preimages_length: Field,

    block_data: HistoricBlockData,
    prover_address: Field,
}

impl PublicContext {
    pub fn new(inputs: abi::PublicContextInputs, args_hash: Field) -> PublicContext {
        let empty_storage_read = ContractStorageRead::empty();
        let empty_storage_update = ContractStorageUpdateRequest::empty();
        PublicContext {
            inputs: inputs,

            args_hash: args_hash,
            return_values: BoundedVec::new(0),

            contract_storage_update_requests: BoundedVec::new(empty_storage_update),
            contract_storage_read: BoundedVec::new(empty_storage_read),
            public_call_stack: BoundedVec::new(0),

            new_commitments: BoundedVec::new(0),
            new_nullifiers: BoundedVec::new(0),

            new_l2_to_l1_msgs: BoundedVec::new(0),

            unencrypted_logs_hash: BoundedVec::new(0),
            unencrypted_logs_preimages_length: 0,

            block_data: inputs.block_data,
            prover_address: 0,

            // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
            // encrypted_logs_preimages: Vec::new(),
            // unencrypted_logs_preimages: Vec::new(),
        }
    }

    pub fn msg_sender(self) -> Field {
        self.inputs.call_context.msg_sender
    }

    pub fn this_address(self) -> Field {
        self.inputs.call_context.storage_contract_address
    }

    pub fn this_portal_address(self) -> Field {
        self.inputs.call_context.portal_contract_address
    }

    pub fn chain_id(self) -> Field {
        self.inputs.public_global_variables.chain_id
    }

    pub fn version(self) -> Field {
        self.inputs.public_global_variables.version
    }

    pub fn selector(self) -> Field {
        self.inputs.call_context.function_selector
    }

    pub fn block_number(self) -> Field {
        self.inputs.public_global_variables.block_number
    }

    pub fn timestamp(self) -> Field {
        self.inputs.public_global_variables.timestamp
    }

    pub fn finish(self) -> abi::PublicCircuitPublicInputs {
        // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
        let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];
        let unencrypted_log_preimages_length = 0;

        // Compute the public call stack hashes
        let pub_circuit_pub_inputs = abi::PublicCircuitPublicInputs {
            call_context: self.inputs.call_context, // Done
            args_hash: self.args_hash, // Done
            contract_storage_update_requests: self.contract_storage_update_requests.storage,
            contract_storage_read: self.contract_storage_read.storage,
            return_values: self.return_values.storage,
            new_commitments: self.new_commitments.storage,
            new_nullifiers: self.new_nullifiers.storage,
            public_call_stack: self.public_call_stack.storage,
            new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,
            unencrypted_logs_hash: unencrypted_logs_hash,
            unencrypted_log_preimages_length: unencrypted_log_preimages_length,
            block_data: self.inputs.block_data,
            prover_address: self.prover_address,
        };
        pub_circuit_pub_inputs
    }

    pub fn push_new_note_hash(&mut self, note_hash: Field) {
        self.new_commitments.push(note_hash);
    }

    pub fn push_new_nullifier(&mut self, nullifier: Field, _nullified_commitment: Field) {
        self.new_nullifiers.push(nullifier);
    }

    pub fn message_portal(&mut self, content: Field) {
        self.new_l2_to_l1_msgs.push(content);
    }

    // PrivateContextInputs must be temporarily passed in to prevent too many unknowns
    // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned
    pub fn consume_l1_to_l2_message(&mut self, msg_key: Field, content: Field, secret: Field) {
        let this = (*self).this_address();
        let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, this, msg_key, content, secret);

        // Push nullifier (and the "commitment" corresponding to this can be "empty")
        self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)
    }

    pub fn accumulate_encrypted_logs<N>(&mut self, log: [Field; N]) {
        let _void1 = self;
        let _void2 = log;
        // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
    }

    pub fn accumulate_unencrypted_logs<T>(&mut self, log: T) {
        let _void1 = self;
        let _void2 = log;
        // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)
    }

    pub fn call_public_function<ARGS_COUNT>(
        _self: Self,
        contract_address: Field,
        function_selector: Field,
        args: [Field; ARGS_COUNT],
    ) -> [Field; RETURN_VALUES_LENGTH] {
        let args_hash = abi::hash_args(args);
        assert(args_hash == arguments::pack_arguments(args));
        call_public_function_internal(
            contract_address,
            function_selector,
            args_hash,
        )
    }

    pub fn call_public_function_no_args(
        _self: Self,
        contract_address: Field,
        function_selector: Field,
    ) -> [Field; RETURN_VALUES_LENGTH] {
        call_public_function_internal(
            contract_address,
            function_selector,
            0,
        )
    }
}

struct Context {
    private: Option<&mut PrivateContext>,
    public: Option<&mut PublicContext>,
}

impl Context {
    pub fn private(context: &mut PrivateContext) -> Context {
        Context {
            private: Option::some(context),
            public: Option::none()
        }
    }

    pub fn public(context: &mut PublicContext) -> Context {
        Context {
            public: Option::some(context),
            private: Option::none()
        }
    }

    pub fn none() -> Context {
        Context {
            public: Option::none(),
            private: Option::none()
        }
    }
}
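The numeric offsets hard-coded in `call_private_function_with_packed_args` above are running sums of the array lengths imported at the top of the file. A short TypeScript sketch that recomputes the table; the constant values are inferred from the offset literals themselves (e.g. 17 - 13 gives RETURN_VALUES_LENGTH = 4) rather than taken from any published API, so treat them as assumptions tied to this snapshot of the code:

// Illustrative only: values inferred from the offset literals used above.
const RETURN_VALUES_LENGTH = 4;
const MAX_READ_REQUESTS_PER_CALL = 32;
// The copy above reuses MAX_READ_REQUESTS_PER_CALL for the pending slots too.
const MAX_PENDING_READ_REQUESTS_PER_CALL = 32;
const MAX_NEW_COMMITMENTS_PER_CALL = 16;
const MAX_NEW_NULLIFIERS_PER_CALL = 16;
const MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 4;
const MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL = 4;
const MAX_NEW_L2_TO_L1_MSGS_PER_CALL = 2;
const NUM_FIELDS_PER_SHA256 = 2;

// Each entry is [fieldName, lengthInFields]; scalars have length 1.
const layout: [string, number][] = [
  ['contract_address', 1],                     // fields[0]
  ['function_data', 4],                        // fields[1..4]
  ['call_context', 7],                         // fields[5..11]
  ['args_hash', 1],                            // fields[12]
  ['return_values', RETURN_VALUES_LENGTH],
  ['read_requests', MAX_READ_REQUESTS_PER_CALL],
  ['pending_read_requests', MAX_PENDING_READ_REQUESTS_PER_CALL],
  ['new_commitments', MAX_NEW_COMMITMENTS_PER_CALL],
  ['new_nullifiers', MAX_NEW_NULLIFIERS_PER_CALL],
  ['nullified_commitments', MAX_NEW_NULLIFIERS_PER_CALL],
  ['private_call_stack', MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],
  ['public_call_stack', MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],
  ['new_l2_to_l1_msgs', MAX_NEW_L2_TO_L1_MSGS_PER_CALL],
  ['encrypted_logs_hash', NUM_FIELDS_PER_SHA256],
  ['unencrypted_logs_hash', NUM_FIELDS_PER_SHA256],
];

// Walk the layout, printing each field's starting offset.
let offset = 0;
for (const [name, length] of layout) {
  console.log(`${name} starts at fields[${offset}]`);
  offset += length;
}
// Prints return_values at 13, read_requests at 17, pending_read_requests at 49,
// new_commitments at 81, ..., matching the literals hard-coded above.

Deriving the table this way makes it obvious which entry has to move when the layout in `private_circuit_public_inputs.hpp` changes, which is exactly what the "TODO handle the offsets as a variable incremented during extraction?" comment in the source is asking for.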
diff --git a/yarn-project/boxes/token/src/config.ts b/yarn-project/boxes/token/src/config.ts index 94a28ae43bd..86d549894a3 100644 --- a/yarn-project/boxes/token/src/config.ts +++ b/yarn-project/boxes/token/src/config.ts @@ -1,6 +1,5 @@ import { TokenContractArtifact } from './artifacts/Token.js'; -import { PXE, createPXEClient } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { ContractArtifact, PXE, createPXEClient } from '@aztec/aztec.js'; // update this if using a different contract diff --git a/yarn-project/boxes/token/src/contracts/Nargo.toml b/yarn-project/boxes/token/src/contracts/Nargo.toml index 469bf27dcc3..a69d8c878a0 100644 ---
a/yarn-project/boxes/token/src/contracts/Nargo.toml +++ b/yarn-project/boxes/token/src/contracts/Nargo.toml @@ -8,4 +8,5 @@ type = "contract" aztec = { path = "../../../../aztec-nr/aztec" } value_note = { path = "../../../../aztec-nr/value-note"} safe_math = { path = "../../../../aztec-nr/safe-math" } -authwit = { path = "../../../../aztec-nr/authwit" } \ No newline at end of file +authwit = { path = "../../../../aztec-nr/authwit" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr b/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr index 37dbcd4ddbe..9be83b78710 100644 --- a/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr +++ b/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr @@ -1,8 +1,8 @@ use dep::std::option::Option; use dep::safe_math::SafeU120; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ context::Context, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, types::address::AztecAddress, }; diff --git a/yarn-project/boxes/token/src/contracts/src/types/token_note.nr b/yarn-project/boxes/token/src/contracts/src/types/token_note.nr index fac2b5eeca3..a524d08c9e4 100644 --- a/yarn-project/boxes/token/src/contracts/src/types/token_note.nr +++ b/yarn-project/boxes/token/src/contracts/src/types/token_note.nr @@ -1,3 +1,4 @@ +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ note::{ note_header::NoteHeader, @@ -6,7 +7,6 @@ use dep::aztec::{ }, hash::pedersen_hash, context::PrivateContext, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, log::emit_encrypted_log, }; diff --git a/yarn-project/boxes/token/src/scripts/call_contract_function.ts b/yarn-project/boxes/token/src/scripts/call_contract_function.ts index 854803eaa42..b05492f065f 100644 --- a/yarn-project/boxes/token/src/scripts/call_contract_function.ts +++ b/yarn-project/boxes/token/src/scripts/call_contract_function.ts @@ -1,6 +1,5 @@ import { getWallet } from './util.js'; -import { AztecAddress, PXE, CompleteAddress, Contract } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, Contract, ContractArtifact, PXE } from '@aztec/aztec.js'; export async function callContractFunction( address: AztecAddress, diff --git a/yarn-project/boxes/token/src/scripts/deploy_contract.ts b/yarn-project/boxes/token/src/scripts/deploy_contract.ts index 84c28cf6318..272ebc637ed 100644 --- a/yarn-project/boxes/token/src/scripts/deploy_contract.ts +++ b/yarn-project/boxes/token/src/scripts/deploy_contract.ts @@ -1,6 +1,4 @@ -import { AztecAddress, CompleteAddress, DeployMethod, Fr } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; -import { PXE } from '@aztec/types'; +import { AztecAddress, CompleteAddress, ContractArtifact, DeployMethod, Fr, PXE } from '@aztec/aztec.js'; export async function deployContract( activeWallet: CompleteAddress, diff --git a/yarn-project/boxes/token/src/scripts/util.ts b/yarn-project/boxes/token/src/scripts/util.ts index 6b6b8884ca8..f2aa3609e15 100644 --- a/yarn-project/boxes/token/src/scripts/util.ts +++ b/yarn-project/boxes/token/src/scripts/util.ts @@ -1,6 +1,12 @@ -import { AccountWallet, Fr, getSandboxAccountsWallets } from '@aztec/aztec.js'; -import { FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; -import { 
CompleteAddress, PXE } from '@aztec/types'; +import { + AccountWallet, + CompleteAddress, + Fr, + FunctionArtifact, + PXE, + encodeArguments, + getSandboxAccountsWallets, +} from '@aztec/aztec.js'; function convertBasicArg(paramType: string, value: any) { switch (paramType) { diff --git a/yarn-project/boxes/token/src/scripts/view_contract_function.ts b/yarn-project/boxes/token/src/scripts/view_contract_function.ts index 8b3bfd8e901..beff0032c77 100644 --- a/yarn-project/boxes/token/src/scripts/view_contract_function.ts +++ b/yarn-project/boxes/token/src/scripts/view_contract_function.ts @@ -1,6 +1,5 @@ import { getWallet } from './util.js'; -import { AztecAddress, PXE, CompleteAddress, Contract } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, Contract, ContractArtifact, PXE } from '@aztec/aztec.js'; export async function viewContractFunction( address: AztecAddress, diff --git a/yarn-project/boxes/token/src/tests/token.contract.test.ts b/yarn-project/boxes/token/src/tests/token.contract.test.ts index 0ec4dee07cc..2ff90186f74 100644 --- a/yarn-project/boxes/token/src/tests/token.contract.test.ts +++ b/yarn-project/boxes/token/src/tests/token.contract.test.ts @@ -2,6 +2,9 @@ import { TokenContract } from '../artifacts/Token.js'; import { TokenSimulator } from './token_simulator.js'; import { AccountWallet, + CompleteAddress, + DebugLogger, + ExtendedNote, Fr, Note, PXE, @@ -9,13 +12,11 @@ import { TxStatus, computeAuthWitMessageHash, computeMessageSecretHash, + createDebugLogger, createPXEClient, getSandboxAccountsWallets, waitForSandbox, } from '@aztec/aztec.js'; -import { CompleteAddress } from '@aztec/circuits.js'; -import { DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { ExtendedNote } from '@aztec/types'; import { afterEach, beforeAll, expect, jest } from '@jest/globals'; // assumes sandbox is running locally, which this script does not trigger diff --git a/yarn-project/boxes/token/tsconfig.dest.json b/yarn-project/boxes/token/tsconfig.dest.json index 1b9c3a4a72e..97c9e8ef0da 100644 --- a/yarn-project/boxes/token/tsconfig.dest.json +++ b/yarn-project/boxes/token/tsconfig.dest.json @@ -1,5 +1,5 @@ { "extends": ".", - "references": [{ "path": "../../aztec.js" }, { "path": "../../foundation" }, { "path": "../../types" }], + "references": [{ "path": "../../aztec.js" }], "exclude": ["src/**/*.test.ts"] } diff --git a/yarn-project/boxes/token/tsconfig.json b/yarn-project/boxes/token/tsconfig.json index 755ee612d17..831cd23a900 100644 --- a/yarn-project/boxes/token/tsconfig.json +++ b/yarn-project/boxes/token/tsconfig.json @@ -26,14 +26,5 @@ { "path": "../../aztec.js" }, - { - "path": "../../circuits.js" - }, - { - "path": "../../foundation" - }, - { - "path": "../../types" - } ] } diff --git a/yarn-project/canary/Dockerfile b/yarn-project/canary/Dockerfile index 64629daf71a..3856f455c9f 100644 --- a/yarn-project/canary/Dockerfile +++ b/yarn-project/canary/Dockerfile @@ -1,4 +1,4 @@ -FROM node:18-alpine AS builder +FROM node:18.19.0-alpine AS builder RUN apk update && apk add --no-cache bash jq curl @@ -20,7 +20,7 @@ WORKDIR /usr/src/canary RUN ./scripts/update_packages.sh canary ../end-to-end/ RUN yarn && yarn build -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add --no-cache udev ttf-freefont chromium bash ENV CHROME_BIN="/usr/bin/chromium-browser" PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" diff --git a/yarn-project/canary/Dockerfile.build 
b/yarn-project/canary/Dockerfile.build index 3173c2100a4..fadd8daaa8b 100644 --- a/yarn-project/canary/Dockerfile.build +++ b/yarn-project/canary/Dockerfile.build @@ -3,7 +3,7 @@ WORKDIR /usr/src/yarn-project/canary # Productionify. See comment in yarn-project-base/Dockerfile. RUN yarn cache clean && yarn workspaces focus --production -FROM node:18-alpine +FROM node:18.19.0-alpine COPY --from=builder /usr/src /usr/src WORKDIR /usr/src/yarn-project/canary ENTRYPOINT ["yarn", "test"] \ No newline at end of file diff --git a/yarn-project/canary/scripts/cond_run_script b/yarn-project/canary/scripts/cond_run_script index 32834cef2f1..84c03bd4574 100755 --- a/yarn-project/canary/scripts/cond_run_script +++ b/yarn-project/canary/scripts/cond_run_script @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Conditionally runs a script if any dependent code has changed between # the last successful run and the present commit. # diff --git a/yarn-project/canary/scripts/extract_packages.sh b/yarn-project/canary/scripts/extract_packages.sh index 24499e23375..78255e01b24 100755 --- a/yarn-project/canary/scripts/extract_packages.sh +++ b/yarn-project/canary/scripts/extract_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash FILE=$1 diff --git a/yarn-project/canary/scripts/run_tests b/yarn-project/canary/scripts/run_tests index 21894dbedb0..6bde0b451a4 100755 --- a/yarn-project/canary/scripts/run_tests +++ b/yarn-project/canary/scripts/run_tests @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # This script is used to run an e2e test in CI (see config.yml and cond_spot_run_tests). # It sets a few environment variables used inside the docker-compose.yml, pulls images, and runs docker-compose. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace diff --git a/yarn-project/canary/scripts/update_packages.sh b/yarn-project/canary/scripts/update_packages.sh index c8fe7740bb7..085dfd217bd 100755 --- a/yarn-project/canary/scripts/update_packages.sh +++ b/yarn-project/canary/scripts/update_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu DIST_TAG=$1 diff --git a/yarn-project/circuits.js/src/abis/abis.ts b/yarn-project/circuits.js/src/abis/abis.ts index ecee67a7528..6d268d65518 100644 --- a/yarn-project/circuits.js/src/abis/abis.ts +++ b/yarn-project/circuits.js/src/abis/abis.ts @@ -116,10 +116,18 @@ export function computeFunctionLeaf(fnLeaf: FunctionLeafPreimage): Fr { ); } -// The "zero leaf" of the function tree is the hash of 5 zero fields. -// TODO: Why can we not just use a zero field as the zero leaf? Complicates things perhaps unnecessarily? -const functionTreeZeroLeaf = pedersenHash(new Array(5).fill(Buffer.alloc(32))); -const functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf); +let functionTreeRootCalculator: MerkleTreeCalculator | undefined; +/** + * The "zero leaf" of the function tree is the hash of 5 zero fields. + * TODO: Why can we not just use a zero field as the zero leaf? Complicates things perhaps unnecessarily? + */ +function getFunctionTreeRootCalculator() { + if (!functionTreeRootCalculator) { + const functionTreeZeroLeaf = pedersenHash(new Array(5).fill(Buffer.alloc(32))); + functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf); + } + return functionTreeRootCalculator; +} /** * Computes a function tree from function leaves. 
@@ -128,7 +136,9 @@ const functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT */ export function computeFunctionTree(fnLeaves: Fr[]) { const leaves = fnLeaves.map(fr => fr.toBuffer()); - return functionTreeRootCalculator.computeTree(leaves).map(b => Fr.fromBuffer(b)); + return getFunctionTreeRootCalculator() + .computeTree(leaves) + .map(b => Fr.fromBuffer(b)); } /** @@ -138,7 +148,7 @@ export function computeFunctionTree(fnLeaves: Fr[]) { */ export function computeFunctionTreeRoot(fnLeaves: Fr[]) { const leaves = fnLeaves.map(fr => fr.toBuffer()); - return Fr.fromBuffer(functionTreeRootCalculator.computeTreeRoot(leaves)); + return Fr.fromBuffer(getFunctionTreeRootCalculator().computeTreeRoot(leaves)); } /** @@ -535,7 +545,7 @@ function computePrivateInputsHash(input: PrivateCircuitPublicInputs) { input.blockHeader.nullifierTreeRoot.toBuffer(), input.blockHeader.contractTreeRoot.toBuffer(), input.blockHeader.l1ToL2MessagesTreeRoot.toBuffer(), - input.blockHeader.blocksTreeRoot.toBuffer(), + input.blockHeader.archiveRoot.toBuffer(), input.blockHeader.publicDataTreeRoot.toBuffer(), input.blockHeader.globalVariablesHash.toBuffer(), computeContractDeploymentDataHash(input.contractDeploymentData).toBuffer(), @@ -603,7 +613,7 @@ function computePublicInputsHash(input: PublicCircuitPublicInputs) { input.blockHeader.nullifierTreeRoot.toBuffer(), input.blockHeader.contractTreeRoot.toBuffer(), input.blockHeader.l1ToL2MessagesTreeRoot.toBuffer(), - input.blockHeader.blocksTreeRoot.toBuffer(), + input.blockHeader.archiveRoot.toBuffer(), input.blockHeader.publicDataTreeRoot.toBuffer(), input.blockHeader.globalVariablesHash.toBuffer(), input.proverAddress.toBuffer(), diff --git a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts b/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts deleted file mode 100644 index 58966728949..00000000000 --- a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -import { MerkleTreeRootCalculator } from './merkle_tree_root_calculator.js'; - -describe('merkle tree root calculator', () => { - it('should correctly handle no leaves', () => { - // Height of 3 is 8 leaves. 
- const calculator = new MerkleTreeRootCalculator(4); - const expected = calculator.computeTreeRoot(new Array(8).fill(new Fr(0)).map(fr => fr.toBuffer())); - expect(calculator.computeTreeRoot()).toEqual(expected); - }); - - it('should correctly leverage zero hashes', () => { - const calculator = new MerkleTreeRootCalculator(4); - const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const padded = [...leaves, ...new Array(3).fill(Buffer.alloc(32))]; - const expected = calculator.computeTreeRoot(padded); - const result = calculator.computeTreeRoot(leaves); - expect(result).not.toBeUndefined(); - expect(result).toEqual(expected); - }); - - it('should correctly handle non default zero leaf', () => { - const zeroLeaf = new Fr(666).toBuffer(); - const calculator = new MerkleTreeRootCalculator(4, zeroLeaf); - const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const padded = [...leaves, ...new Array(3).fill(zeroLeaf)]; - const expected = calculator.computeTreeRoot(padded); - expect(calculator.computeTreeRoot(leaves)).toEqual(expected); - }); -}); diff --git a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts b/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts deleted file mode 100644 index 904eec35776..00000000000 --- a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { pedersenHash } from '@aztec/foundation/crypto'; - -/** - * Calculates the root of a merkle tree. - */ -export class MerkleTreeRootCalculator { - private zeroHashes: Buffer[]; - - constructor(private height: number, zeroLeaf = Buffer.alloc(32)) { - this.zeroHashes = Array.from({ length: height }).reduce( - (acc: Buffer[], _, i) => [...acc, pedersenHash([acc[i], acc[i]])], - [zeroLeaf], - ); - } - - computeTreeRoot(leaves: Buffer[] = []) { - if (leaves.length === 0) { - return this.zeroHashes[this.zeroHashes.length - 1]; - } - - for (let i = 0; i < this.height; ++i) { - let j = 0; - for (; j < leaves.length / 2; ++j) { - const l = leaves[j * 2]; - const r = leaves[j * 2 + 1] || this.zeroHashes[i]; - leaves[j] = pedersenHash([l, r]); - } - leaves = leaves.slice(0, j); - } - - return leaves[0]; - } -} diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts index 20e0e133b9c..cf3a8a5ddec 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts @@ -2,10 +2,6 @@ import { BarretenbergSync, RawBuffer } from '@aztec/bb.js'; import { Buffer } from 'buffer'; -// Get the singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. -// This can be called from multiple other modules as needed, and it ensures it's only constructed once. -const api = await BarretenbergSync.getSingleton(); - /** * AES-128-CBC encryption/decryption. */ @@ -28,6 +24,7 @@ export class Aes128 { } const input = Buffer.concat([data, paddingBuffer]); + const api = BarretenbergSync.getSingleton(); return Buffer.from( api.aesEncryptBufferCbc(new RawBuffer(input), new RawBuffer(iv), new RawBuffer(key), input.length), ); @@ -41,6 +38,7 @@ export class Aes128 { * @returns Decrypted data. 
*/ public decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { + const api = BarretenbergSync.getSingleton(); return Buffer.from( api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), ); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts index e52933eccf4..3b7dd7d3d73 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts @@ -4,22 +4,22 @@ import { EcdsaSignature } from './signature.js'; export * from './signature.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * ECDSA signature construction and helper operations. + * TODO: Replace with codegen api on bb.js. */ export class Ecdsa { + private wasm = BarretenbergSync.getSingleton().getWasm(); + /** * Computes a secp256k1 public key from a private key. * @param privateKey - Secp256k1 private key. * @returns A secp256k1 public key. */ public computePublicKey(privateKey: Buffer): Buffer { - wasm.writeMemory(0, privateKey); - wasm.call('ecdsa__compute_public_key', 0, 32); - return Buffer.from(wasm.getMemorySlice(32, 96)); + this.wasm.writeMemory(0, privateKey); + this.wasm.call('ecdsa__compute_public_key', 0, 32); + return Buffer.from(this.wasm.getMemorySlice(32, 96)); } /** @@ -29,15 +29,15 @@ export class Ecdsa { * @returns An ECDSA signature of the form (r, s, v). */ public constructSignature(msg: Uint8Array, privateKey: Buffer) { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, privateKey); - wasm.writeMemory(mem, msg); - wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, privateKey); + this.wasm.writeMemory(mem, msg); + this.wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); return new EcdsaSignature( - Buffer.from(wasm.getMemorySlice(32, 64)), - Buffer.from(wasm.getMemorySlice(64, 96)), - Buffer.from(wasm.getMemorySlice(96, 97)), + Buffer.from(this.wasm.getMemorySlice(32, 64)), + Buffer.from(this.wasm.getMemorySlice(64, 96)), + Buffer.from(this.wasm.getMemorySlice(96, 97)), ); } @@ -48,14 +48,14 @@ export class Ecdsa { * @returns The secp256k1 public key of the signer. */ public recoverPublicKey(msg: Uint8Array, sig: EcdsaSignature): Buffer { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, sig.r); - wasm.writeMemory(32, sig.s); - wasm.writeMemory(64, sig.v); - wasm.writeMemory(mem, msg); - wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, sig.r); + this.wasm.writeMemory(32, sig.s); + this.wasm.writeMemory(64, sig.v); + this.wasm.writeMemory(mem, msg); + this.wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); - return Buffer.from(wasm.getMemorySlice(65, 129)); + return Buffer.from(this.wasm.getMemorySlice(65, 129)); } /** @@ -66,12 +66,12 @@ export class Ecdsa { * @returns True or false. 
*/ public verifySignature(msg: Uint8Array, pubKey: Buffer, sig: EcdsaSignature) { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, pubKey); - wasm.writeMemory(64, sig.r); - wasm.writeMemory(96, sig.s); - wasm.writeMemory(128, sig.v); - wasm.writeMemory(mem, msg); - return wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, pubKey); + this.wasm.writeMemory(64, sig.r); + this.wasm.writeMemory(96, sig.s); + this.wasm.writeMemory(128, sig.v); + this.wasm.writeMemory(mem, msg); + return this.wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts index 5a18f988c40..cea06e33b1e 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts @@ -4,7 +4,7 @@ import { mapTuple } from '@aztec/foundation/serialize'; import { randomBytes } from 'crypto'; -import { Signature } from '../index.js'; +import { Signature } from '../signature/index.js'; /** * ECDSA signature used for transactions. diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts index a41c0af1fa0..3abf74fd20a 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts @@ -1,15 +1,12 @@ import { BarretenbergSync } from '@aztec/bb.js'; -import { Fr, Point } from '@aztec/foundation/fields'; - -import { GrumpkinScalar } from '../../../index.js'; - -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); +import { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; /** * Grumpkin elliptic curve operations. */ export class Grumpkin { + private wasm = BarretenbergSync.getSingleton().getWasm(); + // prettier-ignore static generator = Point.fromBuffer(Buffer.from([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, @@ -33,10 +30,10 @@ export class Grumpkin { * @returns Result of the multiplication. 
*/ public mul(point: Point, scalar: GrumpkinScalar): Point { - wasm.writeMemory(0, point.toBuffer()); - wasm.writeMemory(64, scalar.toBuffer()); - wasm.call('ecc_grumpkin__mul', 0, 64, 96); - return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(96, 160))); + this.wasm.writeMemory(0, point.toBuffer()); + this.wasm.writeMemory(64, scalar.toBuffer()); + this.wasm.call('ecc_grumpkin__mul', 0, 64, 96); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(96, 160))); } /** @@ -49,16 +46,16 @@ export class Grumpkin { const concatenatedPoints: Buffer = Buffer.concat(points.map(point => point.toBuffer())); const pointsByteLength = points.length * Point.SIZE_IN_BYTES; - const mem = wasm.call('bbmalloc', pointsByteLength * 2); + const mem = this.wasm.call('bbmalloc', pointsByteLength * 2); - wasm.writeMemory(mem, concatenatedPoints); - wasm.writeMemory(0, scalar.toBuffer()); - wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); + this.wasm.writeMemory(mem, concatenatedPoints); + this.wasm.writeMemory(0, scalar.toBuffer()); + this.wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); const result: Buffer = Buffer.from( - wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), + this.wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), ); - wasm.call('bbfree', mem); + this.wasm.call('bbfree', mem); const parsedResult: Point[] = []; for (let i = 0; i < pointsByteLength; i += 64) { @@ -72,8 +69,8 @@ export class Grumpkin { * @returns Random field element. */ public getRandomFr(): Fr { - wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); - return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(0, 32))); + this.wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); + return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(0, 32))); } /** @@ -82,8 +79,8 @@ export class Grumpkin { * @returns Buffer representation of the field element. */ public reduce512BufferToFr(uint512Buf: Buffer): Fr { - wasm.writeMemory(0, uint512Buf); - wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(64, 96))); + this.wasm.writeMemory(0, uint512Buf); + this.wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(64, 96))); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts index 4ab41700e7d..662d561f3f2 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts @@ -1,27 +1,27 @@ import { BarretenbergSync } from '@aztec/bb.js'; +import { Point } from '@aztec/foundation/fields'; import { numToUInt32BE } from '@aztec/foundation/serialize'; -import { GrumpkinPrivateKey, Point, PublicKey } from '../../../index.js'; +import { GrumpkinPrivateKey, PublicKey } from '../../../types/index.js'; import { SchnorrSignature } from './signature.js'; export * from './signature.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * Schnorr signature construction and helper operations. */ export class Schnorr { + private wasm = BarretenbergSync.getSingleton().getWasm(); + /** * Computes a grumpkin public key from a private key. * @param privateKey - The private key. * @returns A grumpkin public key. 
*/ public computePublicKey(privateKey: GrumpkinPrivateKey): PublicKey { - wasm.writeMemory(0, privateKey.toBuffer()); - wasm.call('schnorr_compute_public_key', 0, 32); - return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(32, 96))); + this.wasm.writeMemory(0, privateKey.toBuffer()); + this.wasm.call('schnorr_compute_public_key', 0, 32); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(32, 96))); } /** @@ -31,12 +31,12 @@ export class Schnorr { * @returns A Schnorr signature of the form (s, e). */ public constructSignature(msg: Uint8Array, privateKey: GrumpkinPrivateKey) { - const mem = wasm.call('bbmalloc', msg.length + 4); - wasm.writeMemory(0, privateKey.toBuffer()); - wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - wasm.call('schnorr_construct_signature', mem, 0, 32, 64); + const mem = this.wasm.call('bbmalloc', msg.length + 4); + this.wasm.writeMemory(0, privateKey.toBuffer()); + this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + this.wasm.call('schnorr_construct_signature', mem, 0, 32, 64); - return new SchnorrSignature(Buffer.from(wasm.getMemorySlice(32, 96))); + return new SchnorrSignature(Buffer.from(this.wasm.getMemorySlice(32, 96))); } /** @@ -47,13 +47,13 @@ export class Schnorr { * @returns True or false. */ public verifySignature(msg: Uint8Array, pubKey: PublicKey, sig: SchnorrSignature) { - const mem = wasm.call('bbmalloc', msg.length + 4); - wasm.writeMemory(0, pubKey.toBuffer()); - wasm.writeMemory(64, sig.s); - wasm.writeMemory(96, sig.e); - wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); - const result = wasm.getMemorySlice(128, 129); + const mem = this.wasm.call('bbmalloc', msg.length + 4); + this.wasm.writeMemory(0, pubKey.toBuffer()); + this.wasm.writeMemory(64, sig.s); + this.wasm.writeMemory(96, sig.e); + this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + this.wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); + const result = this.wasm.getMemorySlice(128, 129); return !Buffer.alloc(1, 0).equals(result); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts index a388ce602fb..f4afdd82346 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts @@ -1,12 +1,11 @@ import { BarretenbergSync } from '@aztec/bb.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * Secp256k1 elliptic curve operations. */ export class Secp256k1 { + private wasm = BarretenbergSync.getSingleton().getWasm(); + // prettier-ignore static generator = Buffer.from([ 0x79, 0xbe, 0x66, 0x7e, 0xf9, 0xdc, 0xbb, 0xac, 0x55, 0xa0, 0x62, 0x95, 0xce, 0x87, 0x0b, 0x07, @@ -30,10 +29,10 @@ export class Secp256k1 { * @returns Result of the multiplication. */ public mul(point: Uint8Array, scalar: Uint8Array) { - wasm.writeMemory(0, point); - wasm.writeMemory(64, scalar); - wasm.call('ecc_secp256k1__mul', 0, 64, 96); - return Buffer.from(wasm.getMemorySlice(96, 160)); + this.wasm.writeMemory(0, point); + this.wasm.writeMemory(64, scalar); + this.wasm.call('ecc_secp256k1__mul', 0, 64, 96); + return Buffer.from(this.wasm.getMemorySlice(96, 160)); } /** @@ -41,8 +40,8 @@ export class Secp256k1 { * @returns Random field element. 
*/ public getRandomFr() { - wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); - return Buffer.from(wasm.getMemorySlice(0, 32)); + this.wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); + return Buffer.from(this.wasm.getMemorySlice(0, 32)); } /** @@ -51,8 +50,8 @@ export class Secp256k1 { * @returns Buffer representation of the field element. */ public reduce512BufferToFr(uint512Buf: Buffer) { - wasm.writeMemory(0, uint512Buf); - wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Buffer.from(wasm.getMemorySlice(64, 96)); + this.wasm.writeMemory(0, uint512Buf); + this.wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Buffer.from(this.wasm.getMemorySlice(64, 96)); } } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 89b201b5e5c..f0cfac323a9 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -42,13 +42,15 @@ export const CONTRACT_SUBTREE_SIBLING_PATH_LENGTH = 15; export const NOTE_HASH_SUBTREE_HEIGHT = 7; export const NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH = 25; export const NULLIFIER_SUBTREE_HEIGHT = 7; -export const BLOCKS_TREE_HEIGHT = 16; +export const ARCHIVE_HEIGHT = 16; export const NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH = 13; export const L1_TO_L2_MSG_SUBTREE_HEIGHT = 4; export const L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH = 12; export const FUNCTION_SELECTOR_NUM_BYTES = 4; export const MAPPING_SLOT_PEDERSEN_SEPARATOR = 4; export const NUM_FIELDS_PER_SHA256 = 2; +export const ARGS_HASH_CHUNK_LENGTH = 32; +export const ARGS_HASH_CHUNK_COUNT = 16; export const L1_TO_L2_MESSAGE_LENGTH = 8; export const L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH = 26; export const MAX_NOTE_FIELDS_LENGTH = 20; diff --git a/yarn-project/circuits.js/src/scripts/constants.in.ts b/yarn-project/circuits.js/src/scripts/constants.in.ts index 123bea6ffce..14892b1e1fe 100644 --- a/yarn-project/circuits.js/src/scripts/constants.in.ts +++ b/yarn-project/circuits.js/src/scripts/constants.in.ts @@ -3,7 +3,7 @@ import { fileURLToPath } from '@aztec/foundation/url'; import * as fs from 'fs'; import { dirname, join } from 'path'; -const NOIR_CONSTANTS_FILE = '../../../aztec-nr/aztec/src/constants_gen.nr'; +const NOIR_CONSTANTS_FILE = '../../../noir-protocol-circuits/src/crates/types/src/constants.nr'; const TS_CONSTANTS_FILE = '../constants.gen.ts'; const SOLIDITY_CONSTANTS_FILE = '../../../../l1-contracts/src/core/libraries/ConstantsGen.sol'; diff --git a/yarn-project/circuits.js/src/structs/kernel/block_header.ts b/yarn-project/circuits.js/src/structs/kernel/block_header.ts index 1c83f50351c..f7059e64b60 100644 --- a/yarn-project/circuits.js/src/structs/kernel/block_header.ts +++ b/yarn-project/circuits.js/src/structs/kernel/block_header.ts @@ -31,9 +31,9 @@ export class BlockHeader { */ public l1ToL2MessagesTreeRoot: Fr, /** - * Root of the blocks tree at the time of when this information was assembled. + * Root of the state roots tree (archive) at the block prior to when this information was assembled. */ - public blocksTreeRoot: Fr, + public archiveRoot: Fr, /** * Root of the private kernel vk tree at the time of when this information was assembled. 
*/ @@ -71,7 +71,7 @@ export class BlockHeader { fields.nullifierTreeRoot, fields.contractTreeRoot, fields.l1ToL2MessagesTreeRoot, - fields.blocksTreeRoot, + fields.archiveRoot, fields.privateKernelVkTreeRoot, fields.publicDataTreeRoot, fields.globalVariablesHash, @@ -97,7 +97,7 @@ export class BlockHeader { this.nullifierTreeRoot, this.contractTreeRoot, this.l1ToL2MessagesTreeRoot, - this.blocksTreeRoot, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as + this.archiveRoot, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as // it is not present in noir, this.publicDataTreeRoot, this.globalVariablesHash, @@ -128,7 +128,7 @@ export class BlockHeader { this.nullifierTreeRoot.isZero() && this.contractTreeRoot.isZero() && this.l1ToL2MessagesTreeRoot.isZero() && - this.blocksTreeRoot.isZero() && + this.archiveRoot.isZero() && this.privateKernelVkTreeRoot.isZero() && this.publicDataTreeRoot.isZero() && this.globalVariablesHash.isZero() diff --git a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts index 1d6babb0ebe..c6db387a093 100644 --- a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts @@ -1,8 +1,10 @@ +import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, Tuple } from '@aztec/foundation/serialize'; +import { IndexedTreeLeaf, IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, CONTRACT_SUBTREE_SIBLING_PATH_LENGTH, KERNELS_PER_BASE_ROLLUP, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, @@ -25,28 +27,103 @@ import { AppendOnlyTreeSnapshot } from './append_only_tree_snapshot.js'; * Class containing the data of a preimage of a single leaf in the nullifier tree. * Note: It's called preimage because this data gets hashed before being inserted as a node into the `IndexedTree`. */ -export class NullifierLeafPreimage { +export class NullifierLeafPreimage implements IndexedTreeLeafPreimage { constructor( /** * Leaf value inside the indexed tree's linked list. */ - public leafValue: Fr, + public nullifier: Fr, /** * Next value inside the indexed tree's linked list. */ - public nextValue: Fr, + public nextNullifier: Fr, /** * Index of the next leaf in the indexed tree's linked list. 
*/ - public nextIndex: UInt32, + public nextIndex: bigint, ) {} - toBuffer() { - return serializeToBuffer(this.leafValue, this.nextValue, this.nextIndex); + getKey(): bigint { + return this.nullifier.toBigInt(); + } + + getNextKey(): bigint { + return this.nextNullifier.toBigInt(); + } + + getNextIndex(): bigint { + return this.nextIndex; + } + + asLeaf(): NullifierLeaf { + return new NullifierLeaf(this.nullifier); + } + + toBuffer(): Buffer { + return Buffer.concat(this.toHashInputs()); + } + + toHashInputs(): Buffer[] { + return [ + Buffer.from(this.nullifier.toBuffer()), + Buffer.from(toBufferBE(this.nextIndex, 32)), + Buffer.from(this.nextNullifier.toBuffer()), + ]; + } + + clone(): NullifierLeafPreimage { + return new NullifierLeafPreimage(this.nullifier, this.nextNullifier, this.nextIndex); + } + + static empty(): NullifierLeafPreimage { + return new NullifierLeafPreimage(Fr.ZERO, Fr.ZERO, 0n); + } + + static fromBuffer(buf: Buffer): NullifierLeafPreimage { + const nullifier = Fr.fromBuffer(buf.subarray(0, 32)); + const nextIndex = toBigIntBE(buf.subarray(32, 64)); + const nextNullifier = Fr.fromBuffer(buf.subarray(64, 96)); + return new NullifierLeafPreimage(nullifier, nextNullifier, nextIndex); + } + + static fromLeaf(leaf: NullifierLeaf, nextKey: bigint, nextIndex: bigint): NullifierLeafPreimage { + return new NullifierLeafPreimage(leaf.nullifier, new Fr(nextKey), nextIndex); + } + + static clone(preimage: NullifierLeafPreimage): NullifierLeafPreimage { + return new NullifierLeafPreimage(preimage.nullifier, preimage.nextNullifier, preimage.nextIndex); + } +} + +/** + * A nullifier to be inserted in the nullifier tree. + */ +export class NullifierLeaf implements IndexedTreeLeaf { + constructor( + /** + * Nullifier value. + */ + public nullifier: Fr, + ) {} + + getKey(): bigint { + return this.nullifier.toBigInt(); + } + + toBuffer(): Buffer { + return this.nullifier.toBuffer(); + } + + isEmpty(): boolean { + return this.nullifier.isZero(); + } + + static buildDummy(key: bigint): NullifierLeaf { + return new NullifierLeaf(new Fr(key)); } - static empty() { - return new NullifierLeafPreimage(Fr.ZERO, Fr.ZERO, 0); + static fromBuffer(buf: Buffer): NullifierLeaf { + return new NullifierLeaf(Fr.fromBuffer(buf)); } } @@ -58,7 +135,7 @@ export class ConstantRollupData { /** * Snapshot of the blocks tree at the start of the rollup. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public archiveSnapshot: AppendOnlyTreeSnapshot, /** * Root of the private kernel verification key tree. @@ -100,7 +177,7 @@ export class ConstantRollupData { static getFields(fields: FieldsOf<ConstantRollupData>) { return [ - fields.startBlocksTreeSnapshot, + fields.archiveSnapshot, fields.privateKernelVkTreeRoot, fields.publicKernelVkTreeRoot, fields.baseRollupVkHash, @@ -142,7 +219,7 @@ export class BaseRollupInputs { /** * Snapshot of the blocks tree at the start of the base rollup circuit. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public archiveSnapshot: AppendOnlyTreeSnapshot, /** * The nullifiers to be inserted in the tree, sorted high to low. @@ -196,8 +273,8 @@ export class BaseRollupInputs { /** * Membership witnesses of blocks referred by each of the 2 kernels.
      */
-    public blocksTreeRootMembershipWitnesses: Tuple<
-      MembershipWitness<typeof BLOCKS_TREE_HEIGHT>,
+    public archiveRootMembershipWitnesses: Tuple<
+      MembershipWitness<typeof ARCHIVE_HEIGHT>,
       typeof KERNELS_PER_BASE_ROLLUP
     >,
     /**
@@ -217,7 +294,7 @@ export class BaseRollupInputs {
       fields.startNullifierTreeSnapshot,
       fields.startContractTreeSnapshot,
       fields.startPublicDataTreeRoot,
-      fields.startBlocksTreeSnapshot,
+      fields.archiveSnapshot,
       fields.sortedNewNullifiers,
       fields.sortednewNullifiersIndexes,
       fields.lowNullifierLeafPreimages,
@@ -227,7 +304,7 @@ export class BaseRollupInputs {
       fields.newContractsSubtreeSiblingPath,
       fields.newPublicDataUpdateRequestsSiblingPaths,
       fields.newPublicDataReadsSiblingPaths,
-      fields.blocksTreeRootMembershipWitnesses,
+      fields.archiveRootMembershipWitnesses,
       fields.constants,
     ] as const;
   }
diff --git a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts
index a393014b4cd..ddcb144be63 100644
--- a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts
+++ b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts
@@ -2,7 +2,7 @@ import { Fr } from '@aztec/foundation/fields';
 import { BufferReader, Tuple } from '@aztec/foundation/serialize';
 
 import {
-  BLOCKS_TREE_HEIGHT,
+  ARCHIVE_HEIGHT,
   L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH,
   NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,
 } from '../../constants.gen.js';
@@ -39,11 +39,11 @@ export class RootRollupInputs {
     /**
      * Snapshot of the historical block roots tree at the start of the rollup.
      */
-    public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot,
+    public startArchiveSnapshot: AppendOnlyTreeSnapshot,
     /**
      * Sibling path of the new block tree root.
      */
-    public newBlocksTreeSiblingPath: Tuple<Fr, typeof BLOCKS_TREE_HEIGHT>,
+    public newArchiveSiblingPath: Tuple<Fr, typeof ARCHIVE_HEIGHT>,
   ) {}
 
   toBuffer() {
@@ -60,8 +60,8 @@ export class RootRollupInputs {
       fields.newL1ToL2Messages,
       fields.newL1ToL2MessagesTreeRootSiblingPath,
       fields.startL1ToL2MessagesTreeSnapshot,
-      fields.startBlocksTreeSnapshot,
-      fields.newBlocksTreeSiblingPath,
+      fields.startArchiveSnapshot,
+      fields.newArchiveSiblingPath,
     ] as const;
   }
 }
@@ -131,11 +131,11 @@ export class RootRollupPublicInputs {
     /**
      * Snapshot of the blocks tree roots tree at the start of the rollup.
      */
-    public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot,
+    public startArchiveSnapshot: AppendOnlyTreeSnapshot,
     /**
      * Snapshot of the blocks tree roots tree at the end of the rollup.
      */
-    public endBlocksTreeSnapshot: AppendOnlyTreeSnapshot,
+    public endArchiveSnapshot: AppendOnlyTreeSnapshot,
     /**
      * Hash of the calldata.
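
An aside on the leaf preimage introduced in base_rollup.ts above: the 96-byte buffer layout round-trips cleanly through toBuffer/fromBuffer. A minimal sketch, assuming Fr serializes to 32 bytes and that NullifierLeafPreimage is re-exported from @aztec/circuits.js (illustrative only, not part of the patch):

  import { Fr } from '@aztec/foundation/fields';
  import { NullifierLeafPreimage } from '@aztec/circuits.js';

  // Layout: nullifier (32 bytes) | nextIndex as 32-byte big-endian | nextNullifier (32 bytes).
  const preimage = new NullifierLeafPreimage(new Fr(5n), new Fr(9n), 1n);
  const restored = NullifierLeafPreimage.fromBuffer(preimage.toBuffer());
  // restored.getKey() === 5n, restored.getNextKey() === 9n, restored.getNextIndex() === 1n
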
@@ -161,8 +161,8 @@ export class RootRollupPublicInputs {
       fields.endPublicDataTreeRoot,
       fields.startL1ToL2MessagesTreeSnapshot,
       fields.endL1ToL2MessagesTreeSnapshot,
-      fields.startBlocksTreeSnapshot,
-      fields.endBlocksTreeSnapshot,
+      fields.startArchiveSnapshot,
+      fields.endArchiveSnapshot,
       fields.calldataHash,
       fields.l1ToL2MessagesHash,
     ] as const;
diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts
index 078393f7ba8..7c39a943fda 100644
--- a/yarn-project/circuits.js/src/tests/factories.ts
+++ b/yarn-project/circuits.js/src/tests/factories.ts
@@ -4,10 +4,10 @@ import { numToUInt32BE } from '@aztec/foundation/serialize';
 import { SchnorrSignature } from '../barretenberg/index.js';
 import {
+  ARCHIVE_HEIGHT,
   ARGS_LENGTH,
   AggregationObject,
   AppendOnlyTreeSnapshot,
-  BLOCKS_TREE_HEIGHT,
   BaseOrMergeRollupPublicInputs,
   BaseRollupInputs,
   BlockHeader,
@@ -726,7 +726,7 @@ export function makeConstantBaseRollupData(
   globalVariables: GlobalVariables | undefined = undefined,
 ): ConstantRollupData {
   return ConstantRollupData.from({
-    startBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(seed + 0x300),
+    archiveSnapshot: makeAppendOnlyTreeSnapshot(seed + 0x300),
     privateKernelVkTreeRoot: fr(seed + 0x401),
     publicKernelVkTreeRoot: fr(seed + 0x402),
     baseRollupVkHash: fr(seed + 0x403),
@@ -840,7 +840,7 @@ export function makeRootRollupInputs(seed = 0, globalVariables?: GlobalVariables
     makeTuple(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, fr, 0x2100),
     makeAppendOnlyTreeSnapshot(seed + 0x2200),
     makeAppendOnlyTreeSnapshot(seed + 0x2200),
-    makeTuple(BLOCKS_TREE_HEIGHT, fr, 0x2400),
+    makeTuple(ARCHIVE_HEIGHT, fr, 0x2400),
   );
 }
 
@@ -868,8 +868,8 @@ export function makeRootRollupPublicInputs(
     endPublicDataTreeRoot: fr((seed += 0x100)),
     startL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)),
     endL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)),
-    startBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)),
-    endBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)),
+    startArchiveSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)),
+    endArchiveSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)),
     calldataHash: [new Fr(1n), new Fr(2n)],
     l1ToL2MessagesHash: [new Fr(3n), new Fr(4n)],
   });
@@ -896,11 +896,11 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs {
   const startNullifierTreeSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x200);
   const startContractTreeSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x300);
   const startPublicDataTreeRoot = fr(seed + 0x400);
-  const startBlocksTreeSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x500);
+  const startArchiveSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x500);
 
   const lowNullifierLeafPreimages = makeTuple(
     MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP,
-    x => new NullifierLeafPreimage(fr(x), fr(x + 0x100), x + 0x200),
+    x => new NullifierLeafPreimage(fr(x), fr(x + 0x100), BigInt(x + 0x200)),
     seed + 0x1000,
   );
 
@@ -929,8 +929,8 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs {
     seed + 0x8000,
   );
 
-  const blocksTreeRootMembershipWitnesses = makeTuple(KERNELS_PER_BASE_ROLLUP, x =>
-    makeMembershipWitness(BLOCKS_TREE_HEIGHT, seed + x * 0x1000 + 0x9000),
+  const archiveRootMembershipWitnesses = makeTuple(KERNELS_PER_BASE_ROLLUP, x =>
+    makeMembershipWitness(ARCHIVE_HEIGHT, seed + x * 0x1000 + 0x9000),
   );
 
   const constants = makeConstantBaseRollupData(0x100);
 
@@ -942,7 +942,7 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs {
     startNullifierTreeSnapshot,
     startContractTreeSnapshot,
     startPublicDataTreeRoot,
-    startBlocksTreeSnapshot,
+    archiveSnapshot: startArchiveSnapshot,
     sortedNewNullifiers,
     sortednewNullifiersIndexes,
     lowNullifierLeafPreimages,
@@ -951,7 +951,7 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs {
     newContractsSubtreeSiblingPath,
     newPublicDataUpdateRequestsSiblingPaths,
     newPublicDataReadsSiblingPaths,
-    blocksTreeRootMembershipWitnesses,
+    archiveRootMembershipWitnesses,
     constants,
   });
 }
diff --git a/yarn-project/cli/Dockerfile b/yarn-project/cli/Dockerfile
index 0b8570793f7..dc70ce119ca 100644
--- a/yarn-project/cli/Dockerfile
+++ b/yarn-project/cli/Dockerfile
@@ -1,17 +1,6 @@
 FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod
+ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/cli/dest/bin/index.js"]
 
-# Set Tini as the default entrypoint, to handle ctrl-c etc.
-# Why not just yarn start? About 1 second difference in startup time.
-# At time of writing it still takes 1.7s to just get the help to print. Needs investigating.
-RUN apk add --no-cache tini
-ENTRYPOINT ["/sbin/tini", "--", "node", "--no-warnings", "/usr/src/yarn-project/cli/dest/bin/index.js"]
-
-# Setup cache volume.
-ENV XDG_CACHE_HOME /cache
-RUN mkdir /cache && chmod 777 /cache
-VOLUME [ "/cache" ]
-
-# Run as non-root user.
-RUN corepack enable
-RUN addgroup -S aztec && adduser -S aztec -G aztec
-USER aztec
+# The version has been updated in yarn-project-prod.
+# Adding COMMIT_TAG here to rebuild versioned image.
+ARG COMMIT_TAG=""
\ No newline at end of file
diff --git a/yarn-project/cli/src/bin/index.ts b/yarn-project/cli/src/bin/index.ts
index 014d5e05a24..948d81f2940 100644
--- a/yarn-project/cli/src/bin/index.ts
+++ b/yarn-project/cli/src/bin/index.ts
@@ -1,6 +1,5 @@
 #!/usr/bin/env -S node --no-warnings
-import { createDebugLogger } from '@aztec/aztec.js';
-import { createConsoleLogger } from '@aztec/foundation/log';
+import { createConsoleLogger, createDebugLogger } from '@aztec/foundation/log';
 
 import { getProgram } from '../index.js';
 
@@ -9,6 +8,9 @@ const log = createConsoleLogger();
 
 /** CLI main entrypoint */
 async function main() {
+  process.once('SIGINT', () => process.exit(0));
+  process.once('SIGTERM', () => process.exit(0));
+
   const program = getProgram(log, debugLogger);
   await program.parseAsync(process.argv);
 }
diff --git a/yarn-project/cli/src/cmds/add_contract.ts b/yarn-project/cli/src/cmds/add_contract.ts
new file mode 100644
index 00000000000..6ac361f1fbc
--- /dev/null
+++ b/yarn-project/cli/src/cmds/add_contract.ts
@@ -0,0 +1,27 @@
+import { AztecAddress, CompleteAddress, EthAddress, Fr, Point } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+import { getContractArtifact } from '../utils.js';
+
+/**
+ * Adds a deployed contract to the PXE, given its artifact and complete address.
+ */
+export async function addContract(
+  rpcUrl: string,
+  contractArtifactPath: string,
+  contractAddress: AztecAddress,
+  partialAddress: Fr,
+  publicKey: Point,
+  portalContract: EthAddress | undefined,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const artifact = await getContractArtifact(contractArtifactPath, log);
+  const completeAddress = new CompleteAddress(contractAddress, publicKey ?? Fr.ZERO, partialAddress);
+  const portalContractAddress: EthAddress = portalContract ?? EthAddress.ZERO;
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+
+  await client.addContracts([{ artifact, completeAddress, portalContract: portalContractAddress }]);
+  log(`\nContract added to PXE at ${contractAddress.toString()}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/add_note.ts b/yarn-project/cli/src/cmds/add_note.ts
new file mode 100644
index 00000000000..33ad5a52f80
--- /dev/null
+++ b/yarn-project/cli/src/cmds/add_note.ts
@@ -0,0 +1,24 @@
+import { AztecAddress, Fr } from '@aztec/aztec.js';
+import { DebugLogger } from '@aztec/foundation/log';
+import { ExtendedNote, Note, TxHash } from '@aztec/types';
+
+import { createCompatibleClient } from '../client.js';
+import { parseFields } from '../parse_args.js';
+
+/**
+ * Adds a note to the PXE database for the given owner, contract, storage slot and transaction.
+ */
+export async function addNote(
+  address: AztecAddress,
+  contractAddress: AztecAddress,
+  storageSlot: Fr,
+  txHash: TxHash,
+  noteFields: string[],
+  rpcUrl: string,
+  debugLogger: DebugLogger,
+) {
+  const note = new Note(parseFields(noteFields));
+  const extendedNote = new ExtendedNote(note, address, contractAddress, storageSlot, txHash);
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  await client.addNote(extendedNote);
+}
diff --git a/yarn-project/cli/src/cmds/block_number.ts b/yarn-project/cli/src/cmds/block_number.ts
new file mode 100644
index 00000000000..37795a12966
--- /dev/null
+++ b/yarn-project/cli/src/cmds/block_number.ts
@@ -0,0 +1,12 @@
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Prints the current Aztec L2 block number.
+ */
+export async function blockNumber(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const num = await client.getBlockNumber();
+  log(`${num}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/call.ts b/yarn-project/cli/src/cmds/call.ts
new file mode 100644
index 00000000000..7e395276177
--- /dev/null
+++ b/yarn-project/cli/src/cmds/call.ts
@@ -0,0 +1,35 @@
+import { AztecAddress } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { format } from 'util';
+
+import { createCompatibleClient } from '../client.js';
+import { getFunctionArtifact, getTxSender, prepTx } from '../utils.js';
+
+/**
+ * Simulates a view (read-only) call on a contract function and prints the result.
+ */
+export async function call(
+  functionName: string,
+  functionArgsIn: any[],
+  contractArtifactPath: string,
+  contractAddress: AztecAddress,
+  fromAddress: string | undefined,
+  rpcUrl: string,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log);
+
+  const fnArtifact = getFunctionArtifact(contractArtifact, functionName);
+  if (fnArtifact.parameters.length !== functionArgs.length) {
+    throw Error(
+      `Invalid number of args passed. Expected ${fnArtifact.parameters.length}; Received: ${functionArgs.length}`,
+    );
+  }
+
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const from = await getTxSender(client, fromAddress);
+  const result = await client.viewTx(functionName, functionArgs, contractAddress, from);
+  log(format('\nView result: ', result, '\n'));
+}
diff --git a/yarn-project/cli/src/cmds/check_deploy.ts b/yarn-project/cli/src/cmds/check_deploy.ts
new file mode 100644
index 00000000000..25641418c71
--- /dev/null
+++ b/yarn-project/cli/src/cmds/check_deploy.ts
@@ -0,0 +1,17 @@
+import { AztecAddress, isContractDeployed } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Checks whether a contract is deployed at the given Aztec address.
+ */
+export async function checkDeploy(rpcUrl: string, contractAddress: AztecAddress, debugLogger: DebugLogger, log: LogFn) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const isDeployed = await isContractDeployed(client, contractAddress);
+  if (isDeployed) {
+    log(`\nContract found at ${contractAddress.toString()}\n`);
+  } else {
+    log(`\nNo contract found at ${contractAddress.toString()}\n`);
+  }
+}
diff --git a/yarn-project/cli/src/cmds/compute_selector.ts b/yarn-project/cli/src/cmds/compute_selector.ts
new file mode 100644
index 00000000000..d0ef8e14abe
--- /dev/null
+++ b/yarn-project/cli/src/cmds/compute_selector.ts
@@ -0,0 +1,10 @@
+import { FunctionSelector } from '@aztec/foundation/abi';
+import { LogFn } from '@aztec/foundation/log';
+
+/**
+ * Computes and prints the function selector for a given function signature.
+ */
+export function computeSelector(functionSignature: string, log: LogFn) {
+  const selector = FunctionSelector.fromSignature(functionSignature);
+  log(`${selector}`);
+}
diff --git a/yarn-project/cli/src/cmds/create_account.ts b/yarn-project/cli/src/cmds/create_account.ts
new file mode 100644
index 00000000000..f178409c82d
--- /dev/null
+++ b/yarn-project/cli/src/cmds/create_account.ts
@@ -0,0 +1,39 @@
+import { GrumpkinScalar, getSchnorrAccount } from '@aztec/aztec.js';
+import { Fq, Fr } from '@aztec/foundation/fields';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Creates a new Schnorr account, deploys its account contract, and prints its details.
+ */
+export async function createAccount(
+  rpcUrl: string,
+  privateKey: Fq,
+  wait: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const actualPrivateKey = privateKey ?? GrumpkinScalar.random();
+
+  const account = getSchnorrAccount(client, actualPrivateKey, actualPrivateKey, Fr.ZERO);
+  const { address, publicKey, partialAddress } = account.getCompleteAddress();
+  const tx = await account.deploy();
+  const txHash = await tx.getTxHash();
+  debugLogger(`Account contract tx sent with hash ${txHash}`);
+  if (wait) {
+    log(`\nWaiting for account contract deployment...`);
+    await tx.wait();
+  } else {
+    log(`\nAccount deployment transaction hash: ${txHash}\n`);
+  }
+
+  log(`\nNew account:\n`);
+  log(`Address: ${address.toString()}`);
+  log(`Public key: ${publicKey.toString()}`);
+  if (!privateKey) {
+    log(`Private key: ${actualPrivateKey.toString()}`);
+  }
+  log(`Partial address: ${partialAddress.toString()}`);
+}
diff --git a/yarn-project/cli/src/cmds/deploy.ts b/yarn-project/cli/src/cmds/deploy.ts
new file mode 100644
index 00000000000..459f5498c12
--- /dev/null
+++ b/yarn-project/cli/src/cmds/deploy.ts
@@ -0,0 +1,77 @@
+import { ContractDeployer, EthAddress, Fr, Point } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+import { encodeArgs } from '../encoding.js';
+import { GITHUB_TAG_PREFIX } from '../github.js';
+import { getContractArtifact, getFunctionArtifact } from '../utils.js';
+
+/**
+ * Deploys a compiled contract to Aztec, optionally waiting for the deployment tx to be mined.
+ */
+export async function deploy(
+  artifactPath: string,
+  json: boolean,
+  rpcUrl: string,
+  publicKey: Point | undefined,
+  rawArgs: any[],
+  portalAddress: EthAddress,
+  salt: Fr,
+  wait: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+  logJson: (output: any) => void,
+) {
+  const contractArtifact = await getContractArtifact(artifactPath, log);
+  const constructorArtifact = contractArtifact.functions.find(({ name }) => name === 'constructor');
+
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const nodeInfo = await client.getNodeInfo();
+  const expectedAztecNrVersion = `${GITHUB_TAG_PREFIX}-v${nodeInfo.sandboxVersion}`;
+  if (contractArtifact.aztecNrVersion && contractArtifact.aztecNrVersion !== expectedAztecNrVersion) {
+    log(
+      `\nWarning: Contract was compiled with a different version of Aztec.nr: ${contractArtifact.aztecNrVersion}. Consider updating Aztec.nr to ${expectedAztecNrVersion}\n`,
+    );
+  }
+
+  const deployer = new ContractDeployer(contractArtifact, client, publicKey);
+
+  const constructor = getFunctionArtifact(contractArtifact, 'constructor');
+  if (!constructor) {
+    throw new Error(`Constructor not found in contract ABI`);
+  }
+
+  debugLogger(`Input arguments: ${rawArgs.map((x: any) => `"${x}"`).join(', ')}`);
+  const args = encodeArgs(rawArgs, constructorArtifact!.parameters);
+  debugLogger(`Encoded arguments: ${args.join(', ')}`);
+
+  const deploy = deployer.deploy(...args);
+
+  await deploy.create({ contractAddressSalt: salt, portalContract: portalAddress });
+  const tx = deploy.send({ contractAddressSalt: salt, portalContract: portalAddress });
+  const txHash = await tx.getTxHash();
+  debugLogger(`Deploy tx sent with hash ${txHash}`);
+  if (wait) {
+    const deployed = await tx.wait();
+    const { address, partialAddress } = deployed.contract.completeAddress;
+    if (json) {
+      logJson({ address: address.toString(), partialAddress: partialAddress.toString() });
+    } else {
+      log(`\nContract deployed at ${address.toString()}\n`);
+      log(`Contract partial address ${partialAddress.toString()}\n`);
+    }
+  } else {
+    const { address, partialAddress } = deploy.completeAddress ?? {};
+    if (json) {
+      logJson({
+        address: address?.toString() ?? 'N/A',
+        partialAddress: partialAddress?.toString() ?? 'N/A',
+        txHash: txHash.toString(),
+      });
+    } else {
+      log(`\nContract Address: ${deploy.completeAddress?.address.toString() ?? 'N/A'}`);
+      log(`Contract Partial Address: ${deploy.completeAddress?.partialAddress.toString() ?? 'N/A'}`);
+      log(`Deployment transaction hash: ${txHash}\n`);
+    }
+  }
+}
diff --git a/yarn-project/cli/src/cmds/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/deploy_l1_contracts.ts
new file mode 100644
index 00000000000..3b45537d88a
--- /dev/null
+++ b/yarn-project/cli/src/cmds/deploy_l1_contracts.ts
@@ -0,0 +1,25 @@
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { deployAztecContracts } from '../utils.js';
+
+/**
+ * Deploys the Aztec L1 contracts and prints their addresses.
+ */
+export async function deployL1Contracts(
+  rpcUrl: string,
+  apiKey: string,
+  privateKey: string,
+  mnemonic: string,
+  log: LogFn,
+  debugLogger: DebugLogger,
+) {
+  const { l1ContractAddresses } = await deployAztecContracts(rpcUrl, apiKey, privateKey, mnemonic, debugLogger);
+
+  log('\n');
+  log(`Rollup Address: ${l1ContractAddresses.rollupAddress.toString()}`);
+  log(`Registry Address: ${l1ContractAddresses.registryAddress.toString()}`);
+  log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`);
+  log(`L2 -> L1 Outbox address: ${l1ContractAddresses.outboxAddress.toString()}`);
+  log(`Contract Deployment Emitter Address: ${l1ContractAddresses.contractDeploymentEmitterAddress.toString()}`);
+  log('\n');
+}
diff --git a/yarn-project/cli/src/cmds/example_contracts.ts b/yarn-project/cli/src/cmds/example_contracts.ts
new file mode 100644
index 00000000000..a5b71e2ec0d
--- /dev/null
+++ b/yarn-project/cli/src/cmds/example_contracts.ts
@@ -0,0 +1,12 @@
+import { LogFn } from '@aztec/foundation/log';
+
+import { getExampleContractArtifacts } from '../utils.js';
+
+/**
+ * Lists the example contract artifacts exported by @aztec/noir-contracts.
+ */
+export async function exampleContracts(log: LogFn) {
+  const abisList = await getExampleContractArtifacts();
+  const names = Object.keys(abisList);
+  names.forEach(name => log(name));
+}
diff --git a/yarn-project/cli/src/cmds/generate_p2p_private_key.ts b/yarn-project/cli/src/cmds/generate_p2p_private_key.ts
new file mode 100644
index 00000000000..4bf3ad7a5c4
--- /dev/null
+++ b/yarn-project/cli/src/cmds/generate_p2p_private_key.ts
@@ -0,0 +1,13 @@
+import { LogFn } from '@aztec/foundation/log';
+
+import { createSecp256k1PeerId } from '@libp2p/peer-id-factory';
+
+/**
+ * Generates a secp256k1 LibP2P peer private key and prints it with its peer id.
+ */
+export async function generateP2PPrivateKey(log: LogFn) {
+  const peerId = await createSecp256k1PeerId();
+  const exportedPeerId = Buffer.from(peerId.privateKey!).toString('hex');
+  log(`Private key: ${exportedPeerId}`);
+  log(`Peer Id: ${peerId}`);
+}
diff --git a/yarn-project/cli/src/cmds/generate_private_key.ts b/yarn-project/cli/src/cmds/generate_private_key.ts
new file mode 100644
index 00000000000..8586f03f37a
--- /dev/null
+++ b/yarn-project/cli/src/cmds/generate_private_key.ts
@@ -0,0 +1,23 @@
+import { GrumpkinScalar, generatePublicKey } from '@aztec/aztec.js';
+import { LogFn } from '@aztec/foundation/log';
+
+import { mnemonicToAccount } from 'viem/accounts';
+
+/**
+ * Generates a Grumpkin private/public key pair, optionally derived from a mnemonic.
+ */
+export function generatePrivateKey(mnemonic: string | undefined, log: LogFn) {
+  let privKey;
+  let publicKey;
+  if (mnemonic) {
+    const acc = mnemonicToAccount(mnemonic);
+    // TODO(#2052): This reduction is not secure enough. TACKLE THIS ISSUE BEFORE MAINNET.
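+    // (Note: reducing a uniformly random 32-byte value into the Grumpkin scalar
+    // field over-weights small scalars; a hash-to-field KDF would avoid the bias.)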
+    const key = GrumpkinScalar.fromBufferReduce(Buffer.from(acc.getHdKey().privateKey!));
+    privKey = key.toString();
+    publicKey = generatePublicKey(key);
+  } else {
+    const key = GrumpkinScalar.random();
+    privKey = key.toString();
+    publicKey = generatePublicKey(key);
+  }
+  log(`\nPrivate Key: ${privKey}\nPublic Key: ${publicKey.toString()}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/get_account.ts b/yarn-project/cli/src/cmds/get_account.ts
new file mode 100644
index 00000000000..47b3b1056a7
--- /dev/null
+++ b/yarn-project/cli/src/cmds/get_account.ts
@@ -0,0 +1,18 @@
+import { AztecAddress } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Looks up and prints a registered account by its Aztec address.
+ */
+export async function getAccount(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const account = await client.getRegisteredAccount(aztecAddress);
+
+  if (!account) {
+    log(`Unknown account ${aztecAddress.toString()}`);
+  } else {
+    log(account.toReadableString());
+  }
+}
diff --git a/yarn-project/cli/src/cmds/get_accounts.ts b/yarn-project/cli/src/cmds/get_accounts.ts
new file mode 100644
index 00000000000..155e92d5a4e
--- /dev/null
+++ b/yarn-project/cli/src/cmds/get_accounts.ts
@@ -0,0 +1,19 @@
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Prints all accounts registered in the PXE.
+ */
+export async function getAccounts(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const accounts = await client.getRegisteredAccounts();
+  if (!accounts.length) {
+    log('No accounts found.');
+  } else {
+    log(`Accounts found: \n`);
+    for (const account of accounts) {
+      log(account.toReadableString());
+    }
+  }
+}
diff --git a/yarn-project/cli/src/cmds/get_contract_data.ts b/yarn-project/cli/src/cmds/get_contract_data.ts
new file mode 100644
index 00000000000..16d13047972
--- /dev/null
+++ b/yarn-project/cli/src/cmds/get_contract_data.ts
@@ -0,0 +1,39 @@
+import { AztecAddress } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+import { ContractData } from '@aztec/types';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Fetches and prints contract data (and optionally public bytecode) for an address.
+ */
+export async function getContractData(
+  rpcUrl: string,
+  contractAddress: AztecAddress,
+  includeBytecode: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const contractDataWithOrWithoutBytecode = includeBytecode
+    ? await client.getExtendedContractData(contractAddress)
+    : await client.getContractData(contractAddress);
+
+  if (!contractDataWithOrWithoutBytecode) {
+    log(`No contract data found at ${contractAddress}`);
+    return;
+  }
+  let contractData: ContractData;
+
+  if ('contractData' in contractDataWithOrWithoutBytecode) {
+    contractData = contractDataWithOrWithoutBytecode.contractData;
+  } else {
+    contractData = contractDataWithOrWithoutBytecode;
+  }
+  log(`\nContract Data: \nAddress: ${contractData.contractAddress.toString()}`);
+  log(`Portal: ${contractData.portalContractAddress.toString()}`);
+  if ('bytecode' in contractDataWithOrWithoutBytecode) {
+    log(`Bytecode: ${contractDataWithOrWithoutBytecode.bytecode}`);
+  }
+  log('\n');
+}
diff --git a/yarn-project/cli/src/cmds/get_logs.ts b/yarn-project/cli/src/cmds/get_logs.ts
new file mode 100644
index 00000000000..73a6501b9cf
--- /dev/null
+++ b/yarn-project/cli/src/cmds/get_logs.ts
@@ -0,0 +1,71 @@
+import { AztecAddress, FunctionSelector, LogFilter, LogId, TxHash } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+import { sleep } from '@aztec/foundation/sleep';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Fetches unencrypted logs matching the given filter, optionally polling for new ones.
+ */
+export async function getLogs(
+  txHash: TxHash,
+  fromBlock: number,
+  toBlock: number,
+  afterLog: LogId,
+  contractAddress: AztecAddress,
+  selector: FunctionSelector,
+  rpcUrl: string,
+  follow: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const pxe = await createCompatibleClient(rpcUrl, debugLogger);
+
+  if (follow) {
+    if (txHash) {
+      throw Error('Cannot use --follow with --tx-hash');
+    }
+    if (toBlock) {
+      throw Error('Cannot use --follow with --to-block');
+    }
+  }
+
+  const filter: LogFilter = { txHash, fromBlock, toBlock, afterLog, contractAddress, selector };
+
+  const fetchLogs = async () => {
+    const response = await pxe.getUnencryptedLogs(filter);
+    const logs = response.logs;
+
+    if (!logs.length) {
+      const filterOptions = Object.entries(filter)
+        .filter(([, value]) => value !== undefined)
+        .map(([key, value]) => `${key}: ${value}`)
+        .join(', ');
+      if (!follow) {
+        log(`No logs found for filter: {${filterOptions}}`);
+      }
+    } else {
+      if (!follow && !filter.afterLog) {
+        log('Logs found: \n');
+      }
+      logs.forEach(unencryptedLog => log(unencryptedLog.toHumanReadable()));
+      // Set the continuation parameter for the following requests
+      filter.afterLog = logs[logs.length - 1].id;
+    }
+    return response.maxLogsHit;
+  };
+
+  if (follow) {
+    log('Fetching logs...');
+    while (true) {
+      const maxLogsHit = await fetchLogs();
+      if (!maxLogsHit) {
+        await sleep(1000);
+      }
+    }
+  } else {
+    while (await fetchLogs()) {
+      // Keep fetching logs until we reach the end.
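+      // (fetchLogs advances filter.afterLog on each page, so the loop terminates
+      // once a response reports maxLogsHit as false.)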
+ } + } +} diff --git a/yarn-project/cli/src/cmds/get_node_info.ts b/yarn-project/cli/src/cmds/get_node_info.ts new file mode 100644 index 00000000000..bc71a7bc8cc --- /dev/null +++ b/yarn-project/cli/src/cmds/get_node_info.ts @@ -0,0 +1,17 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const info = await client.getNodeInfo(); + log(`\nNode Info:\n`); + log(`Sandbox Version: ${info.sandboxVersion}\n`); + log(`Compatible Nargo Version: ${info.compatibleNargoVersion}\n`); + log(`Chain Id: ${info.chainId}\n`); + log(`Protocol Version: ${info.protocolVersion}\n`); + log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); +} diff --git a/yarn-project/cli/src/cmds/get_recipient.ts b/yarn-project/cli/src/cmds/get_recipient.ts new file mode 100644 index 00000000000..9edf6edecfc --- /dev/null +++ b/yarn-project/cli/src/cmds/get_recipient.ts @@ -0,0 +1,18 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getRecipient(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const recipient = await client.getRecipient(aztecAddress); + + if (!recipient) { + log(`Unknown recipient ${aztecAddress.toString()}`); + } else { + log(recipient.toReadableString()); + } +} diff --git a/yarn-project/cli/src/cmds/get_recipients.ts b/yarn-project/cli/src/cmds/get_recipients.ts new file mode 100644 index 00000000000..92bc9fad973 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_recipients.ts @@ -0,0 +1,19 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getRecipients(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const recipients = await client.getRecipients(); + if (!recipients.length) { + log('No recipients found.'); + } else { + log(`Recipients found: \n`); + for (const recipient of recipients) { + log(recipient.toReadableString()); + } + } +} diff --git a/yarn-project/cli/src/cmds/get_tx_receipt.ts b/yarn-project/cli/src/cmds/get_tx_receipt.ts new file mode 100644 index 00000000000..fe133608820 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_tx_receipt.ts @@ -0,0 +1,18 @@ +import { TxHash } from '@aztec/aztec.js'; +import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getTxReceipt(rpcUrl: string, txHash: TxHash, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const receipt = await client.getTxReceipt(txHash); + if (!receipt) { + log(`No receipt found for transaction hash ${txHash.toString()}`); + } else { + log(`\nTransaction receipt: \n${JsonStringify(receipt, true)}\n`); + } +} diff --git a/yarn-project/cli/src/cmds/inspect_contract.ts b/yarn-project/cli/src/cmds/inspect_contract.ts new file mode 100644 index 00000000000..e55954adc1e --- /dev/null +++ 
@@ -0,0 +1,29 @@
+import {
+  FunctionSelector,
+  decodeFunctionSignature,
+  decodeFunctionSignatureWithParameterNames,
+} from '@aztec/foundation/abi';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { getContractArtifact } from '../utils.js';
+
+/**
+ * Prints the external functions, signatures and selectors of a contract artifact.
+ */
+export async function inspectContract(contractArtifactFile: string, debugLogger: DebugLogger, log: LogFn) {
+  const contractArtifact = await getContractArtifact(contractArtifactFile, debugLogger);
+  const contractFns = contractArtifact.functions.filter(
+    f => !f.isInternal && f.name !== 'compute_note_hash_and_nullifier',
+  );
+  if (contractFns.length === 0) {
+    log(`No external functions found for contract ${contractArtifact.name}`);
+  }
+  for (const fn of contractFns) {
+    const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters);
+    const signature = decodeFunctionSignature(fn.name, fn.parameters);
+    const selector = FunctionSelector.fromSignature(signature);
+    log(
+      `${fn.functionType} ${signatureWithParameterNames} \n\tfunction signature: ${signature}\n\tselector: ${selector}`,
+    );
+  }
+}
diff --git a/yarn-project/cli/src/cmds/parse_parameter_struct.ts b/yarn-project/cli/src/cmds/parse_parameter_struct.ts
new file mode 100644
index 00000000000..1ef572fd5ce
--- /dev/null
+++ b/yarn-project/cli/src/cmds/parse_parameter_struct.ts
@@ -0,0 +1,30 @@
+import { StructType } from '@aztec/foundation/abi';
+import { JsonStringify } from '@aztec/foundation/json-rpc';
+import { LogFn } from '@aztec/foundation/log';
+
+import { parseStructString } from '../encoding.js';
+import { getContractArtifact } from '../utils.js';
+
+/**
+ * Decodes a hex string into a named struct parameter of a contract function.
+ */
+export async function parseParameterStruct(
+  encodedString: string,
+  contractArtifactPath: string,
+  parameterName: string,
+  log: LogFn,
+) {
+  const contractArtifact = await getContractArtifact(contractArtifactPath, log);
+  const parameterAbitype = contractArtifact.functions
+    .map(({ parameters }) => parameters)
+    .flat()
+    .find(({ name, type }) => name === parameterName && type.kind === 'struct');
+
+  if (!parameterAbitype) {
+    log(`No struct parameter found with name ${parameterName}`);
+    return;
+  }
+
+  const data = parseStructString(encodedString, parameterAbitype.type as StructType);
+  log(`\nStruct Data: \n${JsonStringify(data, true)}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/register_account.ts b/yarn-project/cli/src/cmds/register_account.ts
new file mode 100644
index 00000000000..fae880f81a1
--- /dev/null
+++ b/yarn-project/cli/src/cmds/register_account.ts
@@ -0,0 +1,24 @@
+import { Fq, Fr } from '@aztec/foundation/fields';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Registers an account's private key and partial address with the PXE.
+ */
+export async function registerAccount(
+  rpcUrl: string,
+  privateKey: Fq,
+  partialAddress: Fr,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+
+  const { address, publicKey } = await client.registerAccount(privateKey, partialAddress);
+
+  log(`\nRegistered account:\n`);
+  log(`Address: ${address.toString()}`);
+  log(`Public key: ${publicKey.toString()}`);
+  log(`Partial address: ${partialAddress.toString()}`);
+}
diff --git a/yarn-project/cli/src/cmds/register_recipient.ts b/yarn-project/cli/src/cmds/register_recipient.ts
new file mode 100644
index 00000000000..e2b3aed2f16
--- /dev/null
+++ b/yarn-project/cli/src/cmds/register_recipient.ts
@@ -0,0 +1,21 @@
+import { AztecAddress, Fr, Point } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+import { CompleteAddress } from '@aztec/types';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Registers a recipient's complete address with the PXE.
+ */
+export async function registerRecipient(
+  aztecAddress: AztecAddress,
+  publicKey: Point,
+  partialAddress: Fr,
+  rpcUrl: string,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  await client.registerRecipient(CompleteAddress.create(aztecAddress, publicKey, partialAddress));
+  log(`\nRegistered details for account with address: ${aztecAddress}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/send.ts b/yarn-project/cli/src/cmds/send.ts
new file mode 100644
index 00000000000..cb8c3bfb413
--- /dev/null
+++ b/yarn-project/cli/src/cmds/send.ts
@@ -0,0 +1,40 @@
+import { AztecAddress, Contract, Fq, Fr, getSchnorrAccount } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+import { prepTx } from '../utils.js';
+
+/**
+ * Sends a transaction invoking a contract function, optionally waiting for it to be mined.
+ */
+export async function send(
+  functionName: string,
+  functionArgsIn: any[],
+  contractArtifactPath: string,
+  contractAddress: AztecAddress,
+  privateKey: Fq,
+  rpcUrl: string,
+  wait: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log);
+
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const wallet = await getSchnorrAccount(client, privateKey, privateKey, Fr.ZERO).getWallet();
+  const contract = await Contract.at(contractAddress, contractArtifact, wallet);
+  const tx = contract.methods[functionName](...functionArgs).send();
+  log(`\nTransaction hash: ${(await tx.getTxHash()).toString()}`);
+  if (wait) {
+    await tx.wait();
+
+    log('Transaction has been mined');
+
+    const receipt = await tx.getReceipt();
+    log(`Status: ${receipt.status}\n`);
+    log(`Block number: ${receipt.blockNumber}`);
+    log(`Block hash: ${receipt.blockHash?.toString('hex')}`);
+  } else {
+    log('Transaction pending. Check status with get-tx-receipt');
+  }
+}
diff --git a/yarn-project/cli/src/cmds/unbox.ts b/yarn-project/cli/src/cmds/unbox.ts
new file mode 100644
index 00000000000..b84694e2608
--- /dev/null
+++ b/yarn-project/cli/src/cmds/unbox.ts
@@ -0,0 +1,11 @@
+import { LogFn } from '@aztec/foundation/log';
+
+import { unboxContract } from '../unbox.js';
+
+/**
+ * Unboxes (copies) an example contract project into a local directory.
+ */
+export async function unbox(contractName: string, localDirectory: string | undefined, cliVersion: string, log: LogFn) {
+  const unboxTo: string = localDirectory ? localDirectory : contractName;
+  await unboxContract(contractName, unboxTo, cliVersion, log);
+}
diff --git a/yarn-project/cli/src/index.ts b/yarn-project/cli/src/index.ts
index ff63f7b5aa5..392d6bbc7ba 100644
--- a/yarn-project/cli/src/index.ts
+++ b/yarn-project/cli/src/index.ts
@@ -1,50 +1,16 @@
-import {
-  AztecAddress,
-  Contract,
-  ContractDeployer,
-  EthAddress,
-  Fr,
-  GrumpkinScalar,
-  Note,
-  generatePublicKey,
-  getSchnorrAccount,
-  isContractDeployed,
-} from '@aztec/aztec.js';
-import {
-  FunctionSelector,
-  StructType,
-  decodeFunctionSignature,
-  decodeFunctionSignatureWithParameterNames,
-} from '@aztec/foundation/abi';
-import { JsonStringify } from '@aztec/foundation/json-rpc';
 import { DebugLogger, LogFn } from '@aztec/foundation/log';
-import { sleep } from '@aztec/foundation/sleep';
 import { fileURLToPath } from '@aztec/foundation/url';
-import { compileNoir, generateNoirInterface, generateTypescriptInterface } from '@aztec/noir-compiler/cli';
-import { CompleteAddress, ContractData, ExtendedNote, LogFilter } from '@aztec/types';
+import { addNoirCompilerCommanderActions } from '@aztec/noir-compiler/cli';
 
-import { createSecp256k1PeerId } from '@libp2p/peer-id-factory';
 import { Command, Option } from 'commander';
+import { resolve as dnsResolve } from 'dns';
 import { readFileSync } from 'fs';
 import { dirname, resolve } from 'path';
-import { format } from 'util';
-import { mnemonicToAccount } from 'viem/accounts';
-
-import { createCompatibleClient } from './client.js';
-import { encodeArgs, parseStructString } from './encoding.js';
-import { GITHUB_TAG_PREFIX } from './github.js';
-import { unboxContract } from './unbox.js';
-import { update } from './update/update.js';
+
 import {
-  deployAztecContracts,
-  getContractArtifact,
-  getExampleContractArtifacts,
-  getFunctionArtifact,
-  getTxSender,
   parseAztecAddress,
   parseEthereumAddress,
   parseField,
-  parseFields,
   parseOptionalAztecAddress,
   parseOptionalInteger,
   parseOptionalLogId,
@@ -55,12 +21,20 @@ import {
   parsePublicKey,
   parseSaltFromHexString,
   parseTxHash,
-  prepTx,
-} from './utils.js';
+} from './parse_args.js';
 
-const accountCreationSalt = Fr.ZERO;
+/**
+ * If we can successfully resolve 'host.docker.internal', then we are running in a container, and we should treat
+ * localhost as being host.docker.internal.
+ */
+function getLocalhost() {
+  return new Promise(resolve =>
+    dnsResolve('host.docker.internal', err => (err ? resolve('localhost') : resolve('host.docker.internal'))),
+  );
+}
 
-const { ETHEREUM_HOST = 'http://localhost:8545', PRIVATE_KEY, API_KEY } = process.env;
+const LOCALHOST = await getLocalhost();
+const { ETHEREUM_HOST = `http://${LOCALHOST}:8545`, PRIVATE_KEY, API_KEY } = process.env;
 
 /**
  * Returns commander program that defines the CLI.
@@ -79,7 +53,7 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
 
   const pxeOption = new Option('-u, --rpc-url <string>', 'URL of the PXE')
     .env('PXE_URL')
-    .default('http://localhost:8080')
+    .default(`http://${LOCALHOST}:8080`)
     .makeOptionMandatory(true);
 
   const createPrivateKeyOption = (description: string, mandatory: boolean) =>
@@ -104,20 +78,15 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
       'test test test test test test test test test test test junk',
     )
     .action(async options => {
-      const { l1ContractAddresses } = await deployAztecContracts(
+      const { deployL1Contracts } = await import('./cmds/deploy_l1_contracts.js');
+      await deployL1Contracts(
         options.rpcUrl,
         options.apiKey ?? '',
         options.privateKey,
         options.mnemonic,
+        log,
         debugLogger,
       );
-      log('\n');
-      log(`Rollup Address: ${l1ContractAddresses.rollupAddress.toString()}`);
-      log(`Registry Address: ${l1ContractAddresses.registryAddress.toString()}`);
-      log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`);
-      log(`L2 -> L1 Outbox address: ${l1ContractAddresses.outboxAddress.toString()}`);
-      log(`Contract Deployment Emitter Address: ${l1ContractAddresses.contractDeploymentEmitterAddress.toString()}`);
-      log('\n');
     });
 
   program
@@ -130,20 +99,9 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
       '-m, --mnemonic',
      'An optional mnemonic string used for the private key generation. If not provided, random private key will be generated.',
     )
-    .action(options => {
-      let privKey;
-      let publicKey;
-      if (options.mnemonic) {
-        const acc = mnemonicToAccount(options.mnemonic);
-        // TODO(#2052): This reduction is not secure enough. TACKLE THIS ISSUE BEFORE MAINNET.
-        const key = GrumpkinScalar.fromBufferReduce(Buffer.from(acc.getHdKey().privateKey!));
-        publicKey = generatePublicKey(key);
-      } else {
-        const key = GrumpkinScalar.random();
-        privKey = key.toString();
-        publicKey = generatePublicKey(key);
-      }
-      log(`\nPrivate Key: ${privKey}\nPublic Key: ${publicKey.toString()}\n`);
+    .action(async options => {
+      const { generatePrivateKey } = await import('./cmds/generate_private_key.js');
+      generatePrivateKey(options.mnemonic, log);
     });
 
   program
@@ -151,10 +109,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .summary('Generates a LibP2P peer private key.')
     .description('Generates a private key that can be used for running a node on a LibP2P network.')
     .action(async () => {
-      const peerId = await createSecp256k1PeerId();
-      const exportedPeerId = Buffer.from(peerId.privateKey!).toString('hex');
-      log(`Private key: ${exportedPeerId}`);
-      log(`Peer Id: ${peerId}`);
+      const { generateP2PPrivateKey } = await import('./cmds/generate_p2p_private_key.js');
+      await generateP2PPrivateKey(log);
     });
 
   program
@@ -171,28 +127,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue
     .option('--no-wait', 'Skip waiting for the contract to be deployed. Print the hash of deployment transaction')
     .action(async ({ rpcUrl, privateKey, wait }) => {
-      const client = await createCompatibleClient(rpcUrl, debugLogger);
-      const actualPrivateKey = privateKey ?? GrumpkinScalar.random();
-
-      const account = getSchnorrAccount(client, actualPrivateKey, actualPrivateKey, accountCreationSalt);
-      const { address, publicKey, partialAddress } = account.getCompleteAddress();
-      const tx = await account.deploy();
-      const txHash = await tx.getTxHash();
-      debugLogger(`Account contract tx sent with hash ${txHash}`);
-      if (wait) {
-        log(`\nWaiting for account contract deployment...`);
-        await tx.wait();
-      } else {
-        log(`\nAccount deployment transaction hash: ${txHash}\n`);
-      }
-
-      log(`\nNew account:\n`);
-      log(`Address: ${address.toString()}`);
-      log(`Public key: ${publicKey.toString()}`);
-      if (!privateKey) {
-        log(`Private key: ${actualPrivateKey.toString()}`);
-      }
-      log(`Partial address: ${partialAddress.toString()}`);
+      const { createAccount } = await import('./cmds/create_account.js');
+      await createAccount(rpcUrl, privateKey, wait, debugLogger, log);
     });
 
   program
@@ -209,14 +145,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     )
     .addOption(pxeOption)
     .action(async ({ rpcUrl, privateKey, partialAddress }) => {
-      const client = await createCompatibleClient(rpcUrl, debugLogger);
-
-      const { address, publicKey } = await client.registerAccount(privateKey, partialAddress);
-
-      log(`\nRegistered account:\n`);
-      log(`Address: ${address.toString()}`);
-      log(`Public key: ${publicKey.toString()}`);
-      log(`Partial address: ${partialAddress.toString()}`);
+      const { registerAccount } = await import('./cmds/register_account.js');
+      await registerAccount(rpcUrl, privateKey, partialAddress, debugLogger, log);
     });
 
   program
@@ -248,58 +178,20 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue
     .option('--no-wait', 'Skip waiting for the contract to be deployed. Print the hash of deployment transaction')
     .action(async (artifactPath, { json, rpcUrl, publicKey, args: rawArgs, portalAddress, salt, wait }) => {
-      const contractArtifact = await getContractArtifact(artifactPath, log);
-      const constructorArtifact = contractArtifact.functions.find(({ name }) => name === 'constructor');
-
-      const client = await createCompatibleClient(rpcUrl, debugLogger);
-      const nodeInfo = await client.getNodeInfo();
-      const expectedAztecNrVersion = `${GITHUB_TAG_PREFIX}-v${nodeInfo.sandboxVersion}`;
-      if (contractArtifact.aztecNrVersion && contractArtifact.aztecNrVersion !== expectedAztecNrVersion) {
-        log(
-          `\nWarning: Contract was compiled with a different version of Aztec.nr: ${contractArtifact.aztecNrVersion}. Consider updating Aztec.nr to ${expectedAztecNrVersion}\n`,
-        );
-      }
-
-      const deployer = new ContractDeployer(contractArtifact, client, publicKey);
-
-      const constructor = getFunctionArtifact(contractArtifact, 'constructor');
-      if (!constructor) {
-        throw new Error(`Constructor not found in contract ABI`);
-      }
-
-      debugLogger(`Input arguments: ${rawArgs.map((x: any) => `"${x}"`).join(', ')}`);
-      const args = encodeArgs(rawArgs, constructorArtifact!.parameters);
-      debugLogger(`Encoded arguments: ${args.join(', ')}`);
-
-      const deploy = deployer.deploy(...args);
-
-      await deploy.create({ contractAddressSalt: salt, portalContract: portalAddress });
-      const tx = deploy.send({ contractAddressSalt: salt, portalContract: portalAddress });
-      const txHash = await tx.getTxHash();
-      debugLogger(`Deploy tx sent with hash ${txHash}`);
-      if (wait) {
-        const deployed = await tx.wait();
-        const { address, partialAddress } = deployed.contract.completeAddress;
-        if (json) {
-          logJson({ address: address.toString(), partialAddress: partialAddress.toString() });
-        } else {
-          log(`\nContract deployed at ${address.toString()}\n`);
-          log(`Contract partial address ${partialAddress.toString()}\n`);
-        }
-      } else {
-        const { address, partialAddress } = deploy.completeAddress ?? {};
-        if (json) {
-          logJson({
-            address: address?.toString() ?? 'N/A',
-            partialAddress: partialAddress?.toString() ?? 'N/A',
-            txHash: txHash.toString(),
-          });
-        } else {
-          log(`\nContract Address: ${deploy.completeAddress?.address.toString() ?? 'N/A'}`);
-          log(`Contract Partial Address: ${deploy.completeAddress?.partialAddress.toString() ?? 'N/A'}`);
-          log(`Deployment transaction hash: ${txHash}\n`);
-        }
-      }
+      const { deploy } = await import('./cmds/deploy.js');
+      await deploy(
+        artifactPath,
+        json,
+        rpcUrl,
+        publicKey,
+        rawArgs,
+        portalAddress,
+        salt,
+        wait,
+        debugLogger,
+        log,
+        logJson,
+      );
     });
 
   program
@@ -312,14 +204,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     )
     .addOption(pxeOption)
     .action(async options => {
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const address = options.contractAddress;
-      const isDeployed = await isContractDeployed(client, address);
-      if (isDeployed) {
-        log(`\nContract found at ${address.toString()}\n`);
-      } else {
-        log(`\nNo contract found at ${address.toString()}\n`);
-      }
+      const { checkDeploy } = await import('./cmds/check_deploy.js');
+      await checkDeploy(options.rpcUrl, options.contractAddress, debugLogger, log);
     });
 
   program
@@ -337,32 +223,27 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .option('--portal-address <address>', 'Optional address to a portal contract on L1', parseEthereumAddress)
     .addOption(pxeOption)
     .action(async options => {
-      const artifact = await getContractArtifact(options.contractArtifact, log);
-      const contractAddress: AztecAddress = options.contractAddress;
-      const completeAddress = new CompleteAddress(
-        contractAddress,
-        options.publicKey ?? Fr.ZERO,
+      const { addContract } = await import('./cmds/add_contract.js');
+      await addContract(
+        options.rpcUrl,
+        options.contractArtifact,
+        options.contractAddress,
         options.partialAddress,
+        options.publicKey,
+        options.portalContract,
+        debugLogger,
+        log,
       );
-      const portalContract: EthAddress = options.portalContract ?? EthAddress.ZERO;
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-
-      await client.addContracts([{ artifact, completeAddress, portalContract }]);
-      log(`\nContract added to PXE at ${contractAddress.toString()}\n`);
     });
+
   program
     .command('get-tx-receipt')
     .description('Gets the receipt for the specified transaction hash.')
     .argument('<txHash>', 'A transaction hash to get the receipt for.', parseTxHash)
     .addOption(pxeOption)
     .action(async (txHash, options) => {
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const receipt = await client.getTxReceipt(txHash);
-      if (!receipt) {
-        log(`No receipt found for transaction hash ${txHash.toString()}`);
-      } else {
-        log(`\nTransaction receipt: \n${JsonStringify(receipt, true)}\n`);
-      }
+      const { getTxReceipt } = await import('./cmds/get_tx_receipt.js');
+      await getTxReceipt(options.rpcUrl, txHash, debugLogger, log);
    });
 
   program
@@ -372,28 +253,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .addOption(pxeOption)
     .option('-b, --include-bytecode <boolean>', "Include the contract's public function bytecode, if any.", false)
     .action(async (contractAddress, options) => {
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const contractDataWithOrWithoutBytecode = options.includeBytecode
-        ? await client.getExtendedContractData(contractAddress)
-        : await client.getContractData(contractAddress);
-
-      if (!contractDataWithOrWithoutBytecode) {
-        log(`No contract data found at ${contractAddress}`);
-        return;
-      }
-      let contractData: ContractData;
-
-      if ('contractData' in contractDataWithOrWithoutBytecode) {
-        contractData = contractDataWithOrWithoutBytecode.contractData;
-      } else {
-        contractData = contractDataWithOrWithoutBytecode;
-      }
-      log(`\nContract Data: \nAddress: ${contractData.contractAddress.toString()}`);
-      log(`Portal: ${contractData.portalContractAddress.toString()}`);
-      if ('bytecode' in contractDataWithOrWithoutBytecode) {
-        log(`Bytecode: ${contractDataWithOrWithoutBytecode.bytecode}`);
-      }
-      log('\n');
+      const { getContractData } = await import('./cmds/get_contract_data.js');
+      await getContractData(options.rpcUrl, contractAddress, options.includeBytecode, debugLogger, log);
     });
 
   program
@@ -412,55 +273,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .addOption(pxeOption)
     .option('--follow', 'If set, will keep polling for new logs until interrupted.')
     .action(async ({ txHash, fromBlock, toBlock, afterLog, contractAddress, selector, rpcUrl, follow }) => {
-      const pxe = await createCompatibleClient(rpcUrl, debugLogger);
-
-      if (follow) {
-        if (txHash) {
-          throw Error('Cannot use --follow with --tx-hash');
-        }
-        if (toBlock) {
-          throw Error('Cannot use --follow with --to-block');
-        }
-      }
-
-      const filter: LogFilter = { txHash, fromBlock, toBlock, afterLog, contractAddress, selector };
-
-      const fetchLogs = async () => {
-        const response = await pxe.getUnencryptedLogs(filter);
-        const logs = response.logs;
-
-        if (!logs.length) {
-          const filterOptions = Object.entries(filter)
-            .filter(([, value]) => value !== undefined)
-            .map(([key, value]) => `${key}: ${value}`)
-            .join(', ');
-          if (!follow) {
-            log(`No logs found for filter: {${filterOptions}}`);
-          }
-        } else {
-          if (!follow && !filter.afterLog) {
-            log('Logs found: \n');
-          }
-          logs.forEach(unencryptedLog => log(unencryptedLog.toHumanReadable()));
-          // Set the continuation parameter for the following requests
-          filter.afterLog = logs[logs.length - 1].id;
-        }
-        return response.maxLogsHit;
-      };
-
-      if (follow) {
-        log('Fetching logs...');
-        while (true) {
-          const maxLogsHit = await fetchLogs();
-          if (!maxLogsHit) {
-            await sleep(1000);
-          }
-        }
-      } else {
-        while (await fetchLogs()) {
-          // Keep fetching logs until we reach the end.
- } - } + const { getLogs } = await import('./cmds/get_logs.js'); + await getLogs(txHash, fromBlock, toBlock, afterLog, contractAddress, selector, rpcUrl, follow, debugLogger, log); }); program @@ -475,9 +289,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async ({ address, publicKey, partialAddress, rpcUrl }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - await client.registerRecipient(CompleteAddress.create(address, publicKey, partialAddress)); - log(`\nRegistered details for account with address: ${address}\n`); + const { registerRecipient } = await import('./cmds/register_recipient.js'); + await registerRecipient(address, publicKey, partialAddress, rpcUrl, debugLogger, log); }); program @@ -485,16 +298,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets all the Aztec accounts stored in the PXE.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const accounts = await client.getRegisteredAccounts(); - if (!accounts.length) { - log('No accounts found.'); - } else { - log(`Accounts found: \n`); - for (const account of accounts) { - log(account.toReadableString()); - } - } + const { getAccounts } = await import('./cmds/get_accounts.js'); + await getAccounts(options.rpcUrl, debugLogger, log); }); program @@ -503,14 +308,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argument('
     .addOption(pxeOption)
     .action(async (address, options) => {
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const account = await client.getRegisteredAccount(address);
-
-      if (!account) {
-        log(`Unknown account ${address.toString()}`);
-      } else {
-        log(account.toReadableString());
-      }
+      const { getAccount } = await import('./cmds/get_account.js');
+      await getAccount(address, options.rpcUrl, debugLogger, log);
     });
 
   program
@@ -518,16 +317,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .description('Gets all the recipients stored in the PXE.')
     .addOption(pxeOption)
     .action(async (options: any) => {
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const recipients = await client.getRecipients();
-      if (!recipients.length) {
-        log('No recipients found.');
-      } else {
-        log(`Recipients found: \n`);
-        for (const recipient of recipients) {
-          log(recipient.toReadableString());
-        }
-      }
+      const { getRecipients } = await import('./cmds/get_recipients.js');
+      await getRecipients(options.rpcUrl, debugLogger, log);
     });
 
   program
@@ -536,14 +327,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .argument('<address>', 'The Aztec address to get recipient for', parseAztecAddress)
     .addOption(pxeOption)
     .action(async (address, options) => {
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const recipient = await client.getRecipient(address);
-
-      if (!recipient) {
-        log(`Unknown recipient ${address.toString()}`);
-      } else {
-        log(recipient.toReadableString());
-      }
+      const { getRecipient } = await import('./cmds/get_recipient.js');
+      await getRecipient(address, options.rpcUrl, debugLogger, log);
     });
 
   program
@@ -560,31 +345,18 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .addOption(pxeOption)
     .option('--no-wait', 'Print transaction hash without waiting for it to be mined')
     .action(async (functionName, options) => {
-      const { functionArgs, contractArtifact } = await prepTx(
-        options.contractArtifact,
+      const { send } = await import('./cmds/send.js');
+      await send(
         functionName,
         options.args,
+        options.contractArtifact,
+        options.contractAddress,
+        options.privateKey,
+        options.rpcUrl,
+        options.wait,
+        debugLogger,
         log,
       );
-      const { contractAddress, privateKey } = options;
-
-      const client = await createCompatibleClient(options.rpcUrl, debugLogger);
-      const wallet = await getSchnorrAccount(client, privateKey, privateKey, accountCreationSalt).getWallet();
-      const contract = await Contract.at(contractAddress, contractArtifact, wallet);
-      const tx = contract.methods[functionName](...functionArgs).send();
-      log(`\nTransaction hash: ${(await tx.getTxHash()).toString()}`);
-      if (options.wait) {
-        await tx.wait();
-
-        log('Transaction has been mined');
-
-        const receipt = await tx.getReceipt();
-        log(`Status: ${receipt.status}\n`);
-        log(`Block number: ${receipt.blockNumber}`);
-        log(`Block hash: ${receipt.blockHash?.toString('hex')}`);
-      } else {
-        log('Transaction pending. Check status with get-tx-receipt');
-      }
     });
 
   program
@@ -602,23 +374,17 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .option('-f, --from <string>', 'Aztec address of the caller. If empty, will use the first account from RPC.')
     .addOption(pxeOption)
    .action(async (functionName, options) => {
-      const { functionArgs, contractArtifact } = await prepTx(
-        options.contractArtifact,
+      const { call } = await import('./cmds/call.js');
+      await call(
         functionName,
         options.args,
+        options.contractArtifact,
+        options.contractAddress,
+        options.from,
+        options.rpcUrl,
+        debugLogger,
         log,
      );
-
-      const fnArtifact = getFunctionArtifact(contractArtifact, functionName);
-      if (fnArtifact.parameters.length !== options.args.length) {
-        throw Error(
-          `Invalid number of args passed. Expected ${fnArtifact.parameters.length}; Received: ${options.args.length}`,
Expected ${fnArtifact.parameters.length}; Received: ${options.args.length}`, - ); - } - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const from = await getTxSender(client, options.from); - const result = await client.viewTx(functionName, functionArgs, options.contractAddress, from); - log(format('\nView result: ', result, '\n')); }); program @@ -631,10 +397,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .requiredOption('-n, --note [note...]', 'The members of a Note serialized as hex strings.', []) .addOption(pxeOption) .action(async (address, contractAddress, storageSlot, txHash, options) => { - const note = new Note(parseFields(options.note)); - const extendedNote = new ExtendedNote(note, address, contractAddress, storageSlot, txHash); - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - await client.addNote(extendedNote); + const { addNote } = await import('./cmds/add_note.js'); + await addNote(address, contractAddress, storageSlot, txHash, options.note, options.rpcUrl, debugLogger); }); // Helper for users to decode hex strings into structs if needed. @@ -648,17 +412,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .requiredOption('-p, --parameter ', 'The name of the struct parameter to decode into') .action(async (encodedString, options) => { - const contractArtifact = await getContractArtifact(options.contractArtifact, log); - const parameterAbitype = contractArtifact.functions - .map(({ parameters }) => parameters) - .flat() - .find(({ name, type }) => name === options.parameter && type.kind === 'struct'); - if (!parameterAbitype) { - log(`No struct parameter found with name ${options.parameter}`); - return; - } - const data = parseStructString(encodedString, parameterAbitype.type as StructType); - log(`\nStruct Data: \n${JsonStringify(data, true)}\n`); + const { parseParameterStruct } = await import('./cmds/parse_parameter_struct.js'); + await parseParameterStruct(encodedString, options.contractArtifact, options.parameter, log); }); program @@ -666,18 +421,16 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets the current Aztec L2 block number.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const num = await client.getBlockNumber(); - log(`${num}\n`); + const { blockNumber } = await import('./cmds/block_number.js'); + await blockNumber(options.rpcUrl, debugLogger, log); }); program .command('example-contracts') .description('Lists the example contracts available to deploy from @aztec/noir-contracts') .action(async () => { - const abisList = await getExampleContractArtifacts(); - const names = Object.keys(abisList); - names.forEach(name => log(name)); + const { exampleContracts } = await import('./cmds/example_contracts.js'); + await exampleContracts(log); }); program @@ -691,8 +444,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { 'Local directory to unbox source folder to (relative or absolute), optional - defaults to `/`', ) .action(async (contractName, localDirectory) => { - const unboxTo: string = localDirectory ? 
localDirectory : contractName; - await unboxContract(contractName, unboxTo, cliVersion, log); + const { unbox } = await import('./cmds/unbox.js'); + await unbox(contractName, localDirectory, cliVersion, log); }); program @@ -700,14 +453,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets the information of an aztec node at a URL.') .addOption(pxeOption) .action(async options => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const info = await client.getNodeInfo(); - log(`\nNode Info:\n`); - log(`Sandbox Version: ${info.sandboxVersion}\n`); - log(`Compatible Nargo Version: ${info.compatibleNargoVersion}\n`); - log(`Chain Id: ${info.chainId}\n`); - log(`Protocol Version: ${info.protocolVersion}\n`); - log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); + const { getNodeInfo } = await import('./cmds/get_node_info.js'); + await getNodeInfo(options.rpcUrl, debugLogger, log); }); program @@ -718,30 +465,17 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { `A compiled Noir contract's artifact in JSON format or name of a contract artifact exported by @aztec/noir-contracts`, ) .action(async (contractArtifactFile: string) => { - const contractArtifact = await getContractArtifact(contractArtifactFile, debugLogger); - const contractFns = contractArtifact.functions.filter( - f => !f.isInternal && f.name !== 'compute_note_hash_and_nullifier', - ); - if (contractFns.length === 0) { - log(`No external functions found for contract ${contractArtifact.name}`); - } - for (const fn of contractFns) { - const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters); - const signature = decodeFunctionSignature(fn.name, fn.parameters); - const selector = FunctionSelector.fromSignature(signature); - log( - `${fn.functionType} ${signatureWithParameterNames} \n\tfunction signature: ${signature}\n\tselector: ${selector}`, - ); - } + const { inspectContract } = await import('./cmds/inspect_contract.js'); + await inspectContract(contractArtifactFile, debugLogger, log); }); program .command('compute-selector') .description('Given a function signature, it computes a selector') .argument('', 'Function signature to compute selector for e.g. 
foo(Field)')
-    .action((functionSignature: string) => {
-      const selector = FunctionSelector.fromSignature(functionSignature);
-      log(`${selector}`);
+    .action(async (functionSignature: string) => {
+      const { computeSelector } = await import('./cmds/compute_selector.js');
+      computeSelector(functionSignature, log);
     });

   program
@@ -753,12 +487,11 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command {
     .addOption(pxeOption)
     .action(async (projectPath: string, options) => {
       const { contract } = options;
+      const { update } = await import('./update/update.js');
       await update(projectPath, contract, options.rpcUrl, options.aztecVersion, log);
     });

-  compileNoir(program, 'compile', log);
-  generateTypescriptInterface(program, 'generate-typescript', log);
-  generateNoirInterface(program, 'generate-noir-interface', log);
+  addNoirCompilerCommanderActions(program, log);

   return program;
 }
diff --git a/yarn-project/cli/src/parse_args.ts b/yarn-project/cli/src/parse_args.ts
new file mode 100644
index 00000000000..e2641005cd2
--- /dev/null
+++ b/yarn-project/cli/src/parse_args.ts
@@ -0,0 +1,248 @@
+import { FunctionSelector } from '@aztec/aztec.js/abi';
+import { AztecAddress } from '@aztec/aztec.js/aztec_address';
+import { EthAddress } from '@aztec/aztec.js/eth_address';
+import { Fr, GrumpkinScalar, Point } from '@aztec/aztec.js/fields';
+import { LogId } from '@aztec/aztec.js/log_id';
+import { TxHash } from '@aztec/aztec.js/tx_hash';
+
+import { InvalidArgumentError } from 'commander';
+
+/**
+ * Removes the leading 0x from a hex string. If no leading 0x is found the string is returned unchanged.
+ * @param hex - A hex string
+ * @returns A new string with leading 0x removed
+ */
+const stripLeadingHex = (hex: string) => {
+  if (hex.length > 2 && hex.startsWith('0x')) {
+    return hex.substring(2);
+  }
+  return hex;
+};
+
+/**
+ * Parses a hex-encoded string to an Fr integer to be used as salt
+ * @param str - Hex-encoded string
+ * @returns An integer to be used as salt
+ */
+export function parseSaltFromHexString(str: string): Fr {
+  const hex = stripLeadingHex(str);
+
+  // ensure it's a hex string
+  if (!hex.match(/^[0-9a-f]+$/i)) {
+    throw new InvalidArgumentError('Invalid hex string');
+  }
+
+  // pad it so that we may read it as a buffer.
+  // Buffer needs _exactly_ two hex characters per byte
+  const padded = hex.length % 2 === 1 ? '0' + hex : hex;
+
+  // finally, turn it into an integer
+  return Fr.fromBuffer(Buffer.from(padded, 'hex'));
+}
+
+/**
+ * Parses an AztecAddress from a string.
+ * @param address - A serialized Aztec address
+ * @returns An Aztec address
+ * @throws InvalidArgumentError if the input string is not valid.
+ */
+export function parseAztecAddress(address: string): AztecAddress {
+  try {
+    return AztecAddress.fromString(address);
+  } catch {
+    throw new InvalidArgumentError(`Invalid address: ${address}`);
+  }
+}
+
+/**
+ * Parses an Ethereum address from a string.
+ * @param address - A serialized Ethereum address
+ * @returns An Ethereum address
+ * @throws InvalidArgumentError if the input string is not valid.
+ */
+export function parseEthereumAddress(address: string): EthAddress {
+  try {
+    return EthAddress.fromString(address);
+  } catch {
+    throw new InvalidArgumentError(`Invalid address: ${address}`);
+  }
+}
+
+/**
+ * Parses an optional AztecAddress from a string.
+ * @param address - A serialized Aztec address
+ * @returns An Aztec address
+ * @throws InvalidArgumentError if the input string is not valid.
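+ * An empty input yields undefined rather than an error.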
+ */ +export function parseOptionalAztecAddress(address: string): AztecAddress | undefined { + if (!address) { + return undefined; + } + return parseAztecAddress(address); +} + +/** + * Parses an optional log ID string into a LogId object. + * + * @param logId - The log ID string to parse. + * @returns The parsed LogId object, or undefined if the log ID is missing or empty. + */ +export function parseOptionalLogId(logId: string): LogId | undefined { + if (!logId) { + return undefined; + } + return LogId.fromString(logId); +} + +/** + * Parses a selector from a string. + * @param selector - A serialized selector. + * @returns A selector. + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseOptionalSelector(selector: string): FunctionSelector | undefined { + if (!selector) { + return undefined; + } + try { + return FunctionSelector.fromString(selector); + } catch { + throw new InvalidArgumentError(`Invalid selector: ${selector}`); + } +} + +/** + * Parses a string into an integer or returns undefined if the input is falsy. + * + * @param value - The string to parse into an integer. + * @returns The parsed integer, or undefined if the input string is falsy. + * @throws If the input is not a valid integer. + */ +export function parseOptionalInteger(value: string): number | undefined { + if (!value) { + return undefined; + } + const parsed = Number(value); + if (!Number.isInteger(parsed)) { + throw new InvalidArgumentError('Invalid integer.'); + } + return parsed; +} + +/** + * Parses a TxHash from a string. + * @param txHash - A transaction hash + * @returns A TxHash instance + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseTxHash(txHash: string): TxHash { + try { + return TxHash.fromString(txHash); + } catch { + throw new InvalidArgumentError(`Invalid transaction hash: ${txHash}`); + } +} + +/** + * Parses an optional TxHash from a string. + * Calls parseTxHash internally. + * @param txHash - A transaction hash + * @returns A TxHash instance, or undefined if the input string is falsy. + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseOptionalTxHash(txHash: string): TxHash | undefined { + if (!txHash) { + return undefined; + } + return parseTxHash(txHash); +} + +/** + * Parses a public key from a string. + * @param publicKey - A public key + * @returns A Point instance + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parsePublicKey(publicKey: string): Point { + try { + return Point.fromString(publicKey); + } catch (err) { + throw new InvalidArgumentError(`Invalid public key: ${publicKey}`); + } +} + +/** + * Parses a partial address from a string. + * @param address - A partial address + * @returns A Fr instance + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parsePartialAddress(address: string): Fr { + try { + return Fr.fromString(address); + } catch (err) { + throw new InvalidArgumentError(`Invalid partial address: ${address}`); + } +} + +/** + * Parses a private key from a string. + * @param privateKey - A string + * @returns A private key + * @throws InvalidArgumentError if the input string is not valid. 
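+ * A zero key is additionally rejected, since it most likely indicates a badly formatted input.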
+ */ +export function parsePrivateKey(privateKey: string): GrumpkinScalar { + try { + const value = GrumpkinScalar.fromString(privateKey); + // most likely a badly formatted key was passed + if (value.isZero()) { + throw new Error('Private key must not be zero'); + } + + return value; + } catch (err) { + throw new InvalidArgumentError(`Invalid private key: ${privateKey}`); + } +} + +/** + * Parses a field from a string. + * @param field - A string representing the field. + * @returns A field. + * @throws InvalidArgumentError if the input string is not valid. + */ +export function parseField(field: string): Fr { + try { + const isHex = field.startsWith('0x') || field.match(new RegExp(`^[0-9a-f]{${Fr.SIZE_IN_BYTES * 2}}$`, 'i')); + if (isHex) { + return Fr.fromString(field); + } + + if (['true', 'false'].includes(field)) { + return new Fr(field === 'true'); + } + + const isNumber = +field || field === '0'; + if (isNumber) { + return new Fr(BigInt(field)); + } + + const isBigInt = field.endsWith('n'); + if (isBigInt) { + return new Fr(BigInt(field.replace(/n$/, ''))); + } + + return new Fr(BigInt(field)); + } catch (err) { + throw new InvalidArgumentError(`Invalid field: ${field}`); + } +} + +/** + * Parses an array of strings to Frs. + * @param fields - An array of strings representing the fields. + * @returns An array of Frs. + */ +export function parseFields(fields: string[]): Fr[] { + return fields.map(parseField); +} diff --git a/yarn-project/cli/src/test/utils.test.ts b/yarn-project/cli/src/test/utils.test.ts index e465138083b..d0c9ff1b7ef 100644 --- a/yarn-project/cli/src/test/utils.test.ts +++ b/yarn-project/cli/src/test/utils.test.ts @@ -5,7 +5,8 @@ import { InvalidArgumentError } from 'commander'; import { MockProxy, mock } from 'jest-mock-extended'; import { encodeArgs } from '../encoding.js'; -import { getTxSender, parseSaltFromHexString, stripLeadingHex } from '../utils.js'; +import { parseSaltFromHexString } from '../parse_args.js'; +import { getTxSender, stripLeadingHex } from '../utils.js'; import { mockContractArtifact } from './mocks.js'; describe('CLI Utils', () => { diff --git a/yarn-project/cli/src/update/update.ts b/yarn-project/cli/src/update/update.ts index f0f8429b934..66ff2d59d57 100644 --- a/yarn-project/cli/src/update/update.ts +++ b/yarn-project/cli/src/update/update.ts @@ -10,7 +10,6 @@ import { updateAztecNr } from './noir.js'; import { getNewestVersion, updateAztecDeps, updateLockfile } from './npm.js'; const AZTECJS_PACKAGE = '@aztec/aztec.js'; -const UPDATE_DOCS_URL = 'https://docs.aztec.network/dev_docs/updating'; export async function update( projectPath: string, diff --git a/yarn-project/cli/src/utils.ts b/yarn-project/cli/src/utils.ts index dc4f8732617..40867e8fbb0 100644 --- a/yarn-project/cli/src/utils.ts +++ b/yarn-project/cli/src/utils.ts @@ -1,24 +1,11 @@ -import { AztecAddress, EthAddress, Fr, FunctionSelector, GrumpkinScalar, PXE, Point, TxHash } from '@aztec/aztec.js'; -import { L1ContractArtifactsForDeployment, createEthereumChain, deployL1Contracts } from '@aztec/ethereum'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { type ContractArtifact, type FunctionArtifact } from '@aztec/aztec.js/abi'; +import { AztecAddress } from '@aztec/aztec.js/aztec_address'; +import { type L1ContractArtifactsForDeployment } from '@aztec/aztec.js/ethereum'; +import { type PXE } from '@aztec/aztec.js/interfaces/pxe'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; -import { - ContractDeploymentEmitterAbi, - 
ContractDeploymentEmitterBytecode, - InboxAbi, - InboxBytecode, - OutboxAbi, - OutboxBytecode, - RegistryAbi, - RegistryBytecode, - RollupAbi, - RollupBytecode, -} from '@aztec/l1-artifacts'; -import { LogId } from '@aztec/types'; import { CommanderError, InvalidArgumentError } from 'commander'; import { readFile, rename, writeFile } from 'fs/promises'; -import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; import { encodeArgs } from './encoding.js'; @@ -35,7 +22,7 @@ interface ArtifactsType { * @param fnName - Function name to be found. * @returns The function's ABI. */ -export function getFunctionArtifact(artifact: ContractArtifact, fnName: string) { +export function getFunctionArtifact(artifact: ContractArtifact, fnName: string): FunctionArtifact { const fn = artifact.functions.find(({ name }) => name === fnName); if (!fn) { throw Error(`Function ${fnName} not found in contract ABI.`); @@ -57,6 +44,21 @@ export async function deployAztecContracts( mnemonic: string, debugLogger: DebugLogger, ) { + const { + ContractDeploymentEmitterAbi, + ContractDeploymentEmitterBytecode, + InboxAbi, + InboxBytecode, + OutboxAbi, + OutboxBytecode, + RegistryAbi, + RegistryBytecode, + RollupAbi, + RollupBytecode, + } = await import('@aztec/l1-artifacts'); + const { createEthereumChain, deployL1Contracts } = await import('@aztec/ethereum'); + const { mnemonicToAccount, privateKeyToAccount } = await import('viem/accounts'); + const account = !privateKey ? mnemonicToAccount(mnemonic!) : privateKeyToAccount(`0x${privateKey}`); const chain = createEthereumChain(rpcUrl, apiKey); const l1Artifacts: L1ContractArtifactsForDeployment = { @@ -176,234 +178,6 @@ export const stripLeadingHex = (hex: string) => { return hex; }; -/** - * Parses a hex encoded string to an Fr integer to be used as salt - * @param str - Hex encoded string - * @returns A integer to be used as salt - */ -export function parseSaltFromHexString(str: string): Fr { - const hex = stripLeadingHex(str); - - // ensure it's a hex string - if (!hex.match(/^[0-9a-f]+$/i)) { - throw new InvalidArgumentError('Invalid hex string'); - } - - // pad it so that we may read it as a buffer. - // Buffer needs _exactly_ two hex characters per byte - const padded = hex.length % 2 === 1 ? '0' + hex : hex; - - // finally, turn it into an integer - return Fr.fromBuffer(Buffer.from(padded, 'hex')); -} - -/** - * Parses an AztecAddress from a string. - * @param address - A serialized Aztec address - * @returns An Aztec address - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseAztecAddress(address: string): AztecAddress { - try { - return AztecAddress.fromString(address); - } catch { - throw new InvalidArgumentError(`Invalid address: ${address}`); - } -} - -/** - * Parses an Ethereum address from a string. - * @param address - A serialized Ethereum address - * @returns An Ethereum address - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseEthereumAddress(address: string): EthAddress { - try { - return EthAddress.fromString(address); - } catch { - throw new InvalidArgumentError(`Invalid address: ${address}`); - } -} - -/** - * Parses an AztecAddress from a string. - * @param address - A serialized Aztec address - * @returns An Aztec address - * @throws InvalidArgumentError if the input string is not valid. 
- */ -export function parseOptionalAztecAddress(address: string): AztecAddress | undefined { - if (!address) { - return undefined; - } - return parseAztecAddress(address); -} - -/** - * Parses an optional log ID string into a LogId object. - * - * @param logId - The log ID string to parse. - * @returns The parsed LogId object, or undefined if the log ID is missing or empty. - */ -export function parseOptionalLogId(logId: string): LogId | undefined { - if (!logId) { - return undefined; - } - return LogId.fromString(logId); -} - -/** - * Parses a selector from a string. - * @param selector - A serialized selector. - * @returns A selector. - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseOptionalSelector(selector: string): FunctionSelector | undefined { - if (!selector) { - return undefined; - } - try { - return FunctionSelector.fromString(selector); - } catch { - throw new InvalidArgumentError(`Invalid selector: ${selector}`); - } -} - -/** - * Parses a string into an integer or returns undefined if the input is falsy. - * - * @param value - The string to parse into an integer. - * @returns The parsed integer, or undefined if the input string is falsy. - * @throws If the input is not a valid integer. - */ -export function parseOptionalInteger(value: string): number | undefined { - if (!value) { - return undefined; - } - const parsed = Number(value); - if (!Number.isInteger(parsed)) { - throw new InvalidArgumentError('Invalid integer.'); - } - return parsed; -} - -/** - * Parses a TxHash from a string. - * @param txHash - A transaction hash - * @returns A TxHash instance - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseTxHash(txHash: string): TxHash { - try { - return TxHash.fromString(txHash); - } catch { - throw new InvalidArgumentError(`Invalid transaction hash: ${txHash}`); - } -} - -/** - * Parses an optional TxHash from a string. - * Calls parseTxHash internally. - * @param txHash - A transaction hash - * @returns A TxHash instance, or undefined if the input string is falsy. - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseOptionalTxHash(txHash: string): TxHash | undefined { - if (!txHash) { - return undefined; - } - return parseTxHash(txHash); -} - -/** - * Parses a public key from a string. - * @param publicKey - A public key - * @returns A Point instance - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parsePublicKey(publicKey: string): Point { - try { - return Point.fromString(publicKey); - } catch (err) { - throw new InvalidArgumentError(`Invalid public key: ${publicKey}`); - } -} - -/** - * Parses a partial address from a string. - * @param address - A partial address - * @returns A Fr instance - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parsePartialAddress(address: string): Fr { - try { - return Fr.fromString(address); - } catch (err) { - throw new InvalidArgumentError(`Invalid partial address: ${address}`); - } -} - -/** - * Parses a private key from a string. - * @param privateKey - A string - * @returns A private key - * @throws InvalidArgumentError if the input string is not valid. 
- */ -export function parsePrivateKey(privateKey: string): GrumpkinScalar { - try { - const value = GrumpkinScalar.fromString(privateKey); - // most likely a badly formatted key was passed - if (value.isZero()) { - throw new Error('Private key must not be zero'); - } - - return value; - } catch (err) { - throw new InvalidArgumentError(`Invalid private key: ${privateKey}`); - } -} - -/** - * Parses a field from a string. - * @param field - A string representing the field. - * @returns A field. - * @throws InvalidArgumentError if the input string is not valid. - */ -export function parseField(field: string): Fr { - try { - const isHex = field.startsWith('0x') || field.match(new RegExp(`^[0-9a-f]{${Fr.SIZE_IN_BYTES * 2}}$`, 'i')); - if (isHex) { - return Fr.fromString(field); - } - - if (['true', 'false'].includes(field)) { - return new Fr(field === 'true'); - } - - const isNumber = +field || field === '0'; - if (isNumber) { - return new Fr(BigInt(field)); - } - - const isBigInt = field.endsWith('n'); - if (isBigInt) { - return new Fr(BigInt(field.replace(/n$/, ''))); - } - - return new Fr(BigInt(field)); - } catch (err) { - throw new InvalidArgumentError(`Invalid field: ${field}`); - } -} - -/** - * Parses an array of strings to Frs. - * @param fields - An array of strings representing the fields. - * @returns An array of Frs. - */ -export function parseFields(fields: string[]): Fr[] { - return fields.map(parseField); -} - /** * Updates a file in place atomically. * @param filePath - Path to file diff --git a/yarn-project/deploy_npm.sh b/yarn-project/deploy_npm.sh index e17e69540a6..0f47743f6d4 100755 --- a/yarn-project/deploy_npm.sh +++ b/yarn-project/deploy_npm.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu @@ -7,12 +7,10 @@ if [ -z "$COMMIT_TAG" ]; then exit 0 fi -extract_repo yarn-project /usr/src project +extract_repo yarn-project-prod /usr/src project cd project/src/yarn-project echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >.npmrc -# also copy npcrc into the l1-contracts directory -cp .npmrc ../l1-contracts # This is to be used with the 'canary' tag for testing, and then 'latest' for making it public DIST_TAG=${1:-"latest"} @@ -70,12 +68,8 @@ function deploy_package() { fi fi - # Back to root - if [ "$REPOSITORY" == "../l1-contracts" ]; then - cd ../yarn-project - else - cd .. - fi + # Return to root + cd .. } # New packages here should be added after the last package that they depend on @@ -97,4 +91,3 @@ deploy_package p2p deploy_package world-state deploy_package sequencer-client deploy_package aztec-node -deploy_package ../l1-contracts diff --git a/yarn-project/end-to-end/Dockerfile b/yarn-project/end-to-end/Dockerfile index e3e8c489526..d65eda2b13a 100644 --- a/yarn-project/end-to-end/Dockerfile +++ b/yarn-project/end-to-end/Dockerfile @@ -10,7 +10,7 @@ RUN yarn workspaces focus --production && yarn cache clean # Create final, minimal size image. # TODO: Not very minimal as chromium adds about 500MB of bloat :/ Separate or install at test runtime? 
-FROM node:18-alpine
+FROM node:18.19.0-alpine
 RUN apk update && apk add --no-cache \
   jq \
   bash \
diff --git a/yarn-project/end-to-end/scripts/setup_canary.sh b/yarn-project/end-to-end/scripts/setup_canary.sh
index 2d4cd7fa2ef..14d7e176652 100755
--- a/yarn-project/end-to-end/scripts/setup_canary.sh
+++ b/yarn-project/end-to-end/scripts/setup_canary.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -eu

 TARGET_PKGS_FILE=$1
diff --git a/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts b/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts
index d2b5d7aac34..56807979809 100644
--- a/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts
+++ b/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts
@@ -46,7 +46,8 @@ describe('e2e_inclusion_proofs_contract', () => {
     {
       // Prove note inclusion in a given block.
-      // We prove the note existence at current block number because we don't currently have historical data
+      // TODO: Use the note's block number from the note-creation tx here, to test the archival node. This is
+      // currently not possible because of issue #3564
       const blockNumber = await pxe.getBlockNumber();
       const ignoredCommitment = 0; // Not ignored only when the note doesn't exist
       await contract.methods.proveNoteInclusion(owner, blockNumber, ignoredCommitment).send().wait();
@@ -54,7 +55,8 @@
     {
       // Prove that the note has not been nullified
-      // We prove the note existence at current block number because we don't currently have historical data
+      // TODO: Use the note's block number from the note-creation tx here, to test the archival node. This is
+      // currently not possible because of issue #3564
       const blockNumber = await pxe.getBlockNumber();
       const ignoredNullifier = 0; // Not ignored only when the note doesn't exist
       await contract.methods.proveNullifierNonInclusion(owner, blockNumber, ignoredNullifier).send().wait();
diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json
index bbfe52161c6..23f15ef2d44 100644
--- a/yarn-project/end-to-end/tsconfig.json
+++ b/yarn-project/end-to-end/tsconfig.json
@@ -52,7 +52,5 @@
       "path": "../world-state"
     }
   ],
-  "include": [
-    "src"
-  ]
+  "include": ["src"]
 }
diff --git a/yarn-project/foundation/package.json b/yarn-project/foundation/package.json
index 372c031ef39..e6e8c8e66c3 100644
--- a/yarn-project/foundation/package.json
+++ b/yarn-project/foundation/package.json
@@ -28,6 +28,7 @@
     "./sleep": "./dest/sleep/index.js",
     "./timer": "./dest/timer/index.js",
     "./transport": "./dest/transport/index.js",
+    "./trees": "./dest/trees/index.js",
     "./wasm": "./dest/wasm/index.js",
     "./worker": "./dest/worker/index.js",
     "./bigint-buffer": "./dest/bigint-buffer/index.js",
diff --git a/yarn-project/foundation/src/abi/abi_coder.ts b/yarn-project/foundation/src/abi/abi_coder.ts
index 6971b423757..a702e65153a 100644
--- a/yarn-project/foundation/src/abi/abi_coder.ts
+++ b/yarn-project/foundation/src/abi/abi_coder.ts
@@ -1,4 +1,4 @@
-import { ABIType } from '@aztec/foundation/abi';
+import { type ABIType } from './abi.js';

 /**
  * Get the size of an ABI type in field elements.
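Aside: the CLI commands rewritten earlier in this diff all share one shape: the commander action defers importing its implementation until the command actually runs. A minimal sketch of that pattern, with a hypothetical `status` command and `./cmds/status.js` module standing in for the real ones (the `LogFn` shape mirrors the logging callback threaded through the real commands):

import { Command } from 'commander';

// Mirrors the logging callback used by the CLI commands in this diff.
type LogFn = (msg: string) => void;

export function addStatusCommand(program: Command, log: LogFn): Command {
  return program
    .command('status')
    .description('Prints a status line.')
    .action(async () => {
      // The import is evaluated only when the command runs, so `--help` and
      // unrelated commands never pay for this module's dependency graph.
      const { printStatus } = await import('./cmds/status.js');
      await printStatus(log);
    });
}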
diff --git a/yarn-project/foundation/src/abi/decoder.ts b/yarn-project/foundation/src/abi/decoder.ts index 9ea69388f76..cd37ba361e5 100644 --- a/yarn-project/foundation/src/abi/decoder.ts +++ b/yarn-project/foundation/src/abi/decoder.ts @@ -1,5 +1,5 @@ -import { ABIParameter, ABIType, ABIVariable, FunctionArtifact } from '@aztec/foundation/abi'; -import { Fr } from '@aztec/foundation/fields'; +import { Fr } from '../fields/index.js'; +import { ABIParameter, type ABIType, ABIVariable, FunctionArtifact } from './abi.js'; /** * The type of our decoded ABI. diff --git a/yarn-project/foundation/src/abi/encoder.ts b/yarn-project/foundation/src/abi/encoder.ts index 2e4b1844060..a4db8e24230 100644 --- a/yarn-project/foundation/src/abi/encoder.ts +++ b/yarn-project/foundation/src/abi/encoder.ts @@ -1,6 +1,6 @@ -import { ABIType, FunctionAbi, isAddressStruct } from '@aztec/foundation/abi'; - import { Fr } from '../fields/index.js'; +import { ABIType, FunctionAbi } from './abi.js'; +import { isAddressStruct } from './utils.js'; /** * Encodes arguments for a function call. diff --git a/yarn-project/foundation/src/abi/function_selector.ts b/yarn-project/foundation/src/abi/function_selector.ts index b898519d53e..de0b879cf6b 100644 --- a/yarn-project/foundation/src/abi/function_selector.ts +++ b/yarn-project/foundation/src/abi/function_selector.ts @@ -1,9 +1,11 @@ -import { ABIParameter, decodeFunctionSignature } from '@aztec/foundation/abi'; import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { keccak } from '@aztec/foundation/crypto'; -import { Fr } from '@aztec/foundation/fields'; import { BufferReader } from '@aztec/foundation/serialize'; +import { keccak } from '../crypto/keccak/index.js'; +import { Fr } from '../fields/index.js'; +import { ABIParameter } from './abi.js'; +import { decodeFunctionSignature } from './decoder.js'; + /** * A function selector is the first 4 bytes of the hash of a function signature. */ @@ -120,7 +122,7 @@ export class FunctionSelector { static fromString(selector: string) { const buf = Buffer.from(selector.replace(/^0x/i, ''), 'hex'); if (buf.length !== FunctionSelector.SIZE) { - throw new Error(`Invalid length ${buf.length}.`); + throw new Error(`Invalid FunctionSelector length ${buf.length}.`); } return FunctionSelector.fromBuffer(buf); } diff --git a/yarn-project/foundation/src/abi/utils.ts b/yarn-project/foundation/src/abi/utils.ts index b2ee62d2dd5..d7d15a4d94a 100644 --- a/yarn-project/foundation/src/abi/utils.ts +++ b/yarn-project/foundation/src/abi/utils.ts @@ -1,4 +1,4 @@ -import { ABIType } from './abi.js'; +import { type ABIType } from './abi.js'; /** * Returns whether the ABI type is an Aztec or Ethereum Address defined in Aztec.nr. 
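Aside: since several hunks above touch `FunctionSelector`, here is a minimal sketch of deriving and round-tripping a selector. It assumes the `fromSignature`/`fromString` API used elsewhere in this diff and the `@aztec/aztec.js/abi` subpath from parse_args.ts; `transfer(Field,Field)` is a made-up signature, and `toString()` is assumed to render the 4-byte hex form:

import { FunctionSelector } from '@aztec/aztec.js/abi';

// A selector is the first 4 bytes of the hash of the function signature.
const selector = FunctionSelector.fromSignature('transfer(Field,Field)');
console.log(selector.toString());

// fromString requires exactly 4 bytes; after this change a bad length fails
// with the more descriptive `Invalid FunctionSelector length N.` message.
const roundTripped = FunctionSelector.fromString(selector.toString());
console.log(roundTripped.toString());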
diff --git a/yarn-project/foundation/src/aztec-address/index.ts b/yarn-project/foundation/src/aztec-address/index.ts
index 4c042ad96c6..a03257f36de 100644
--- a/yarn-project/foundation/src/aztec-address/index.ts
+++ b/yarn-project/foundation/src/aztec-address/index.ts
@@ -10,7 +10,7 @@ import { Fr } from '../fields/index.js';
 export class AztecAddress extends Fr {
   constructor(buffer: Buffer) {
     if (buffer.length !== 32) {
-      throw new Error(`Invalid length ${buffer.length}.`);
+      throw new Error(`Invalid AztecAddress length ${buffer.length}.`);
     }
     super(buffer);
   }
diff --git a/yarn-project/foundation/src/crypto/index.ts b/yarn-project/foundation/src/crypto/index.ts
index 98abed1dacd..f574fb4d2f5 100644
--- a/yarn-project/foundation/src/crypto/index.ts
+++ b/yarn-project/foundation/src/crypto/index.ts
@@ -1,4 +1,16 @@
+import { BarretenbergSync } from '@aztec/bb.js';
+
 export * from './keccak/index.js';
 export * from './random/index.js';
 export * from './sha256/index.js';
 export * from './pedersen/index.js';
+
+/**
+ * Init the bb singleton. This constructs (if not already) the barretenberg sync api within bb.js itself.
+ * It takes about 100-200ms to initialize. That may not seem like much, but combined with everything else
+ * initializing at startup it adds up, so developers may want to pick precisely when to incur this cost.
+ * If in a test environment, we'll just do it on module load.
+ */
+export async function init() {
+  await BarretenbergSync.initSingleton();
+}
diff --git a/yarn-project/foundation/src/crypto/pedersen/index.test.ts b/yarn-project/foundation/src/crypto/pedersen/index.test.ts
index 1d152a917d0..412b07ec962 100644
--- a/yarn-project/foundation/src/crypto/pedersen/index.test.ts
+++ b/yarn-project/foundation/src/crypto/pedersen/index.test.ts
@@ -1,7 +1,13 @@
+import { BarretenbergSync } from '@aztec/bb.js';
+
 import { toBufferBE } from '../../bigint-buffer/index.js';
 import { pedersenCommit, pedersenHash, pedersenHashBuffer } from './index.js';

 describe('pedersen', () => {
+  beforeAll(async () => {
+    await BarretenbergSync.initSingleton();
+  });
+
   it('pedersen commit', () => {
     const r = pedersenCommit([toBufferBE(1n, 32), toBufferBE(1n, 32)]);
     expect(r).toEqual([
diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
index 2a117ea5519..6793b368c8f 100644
--- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
+++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts
@@ -1,9 +1,5 @@
 import { BarretenbergSync, Fr } from '@aztec/bb.js';

-// Get the singleton. This constructs (if not already) the barretenberg sync api within bb.js itself.
-// This can be called from multiple other modules as needed, and it ensures it's only constructed once.
-const api = await BarretenbergSync.getSingleton();
-
 /**
  * Create a pedersen commitment (point) from an array of input fields.
  * Left pads any inputs less than 32 bytes.
@@ -13,7 +9,7 @@ export function pedersenCommit(input: Buffer[]) {
     throw new Error('All input buffers must be <= 32 bytes.');
   }
   input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i));
-  const point = api.pedersenCommit(input.map(i => new Fr(i)));
+  const point = BarretenbergSync.getSingleton().pedersenCommit(input.map(i => new Fr(i)));
   // toBuffer returns Uint8Arrays (browser/worker-boundary friendly).
   // TODO: rename toTypedArray()?
return [Buffer.from(point.x.toBuffer()), Buffer.from(point.y.toBuffer())]; @@ -29,7 +25,7 @@ export function pedersenHash(input: Buffer[], index = 0) { } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); return Buffer.from( - api + BarretenbergSync.getSingleton() .pedersenHash( input.map(i => new Fr(i)), index, @@ -42,5 +38,5 @@ export function pedersenHash(input: Buffer[], index = 0) { * Create a pedersen hash from an arbitrary length buffer. */ export function pedersenHashBuffer(input: Buffer, index = 0) { - return Buffer.from(api.pedersenHashBuffer(input, index).toBuffer()); + return Buffer.from(BarretenbergSync.getSingleton().pedersenHashBuffer(input, index).toBuffer()); } diff --git a/yarn-project/foundation/src/eth-address/index.ts b/yarn-project/foundation/src/eth-address/index.ts index 2571f09790c..76587ecab26 100644 --- a/yarn-project/foundation/src/eth-address/index.ts +++ b/yarn-project/foundation/src/eth-address/index.ts @@ -1,4 +1,5 @@ -import { keccak256String, randomBytes } from '../crypto/index.js'; +import { keccak256String } from '../crypto/keccak/index.js'; +import { randomBytes } from '../crypto/random/index.js'; import { Fr } from '../fields/index.js'; import { BufferReader } from '../serialize/index.js'; diff --git a/yarn-project/foundation/src/fields/fields.ts b/yarn-project/foundation/src/fields/fields.ts index 591b57f6892..90600643cfe 100644 --- a/yarn-project/foundation/src/fields/fields.ts +++ b/yarn-project/foundation/src/fields/fields.ts @@ -1,5 +1,5 @@ import { toBigIntBE, toBufferBE } from '../bigint-buffer/index.js'; -import { randomBytes } from '../crypto/index.js'; +import { randomBytes } from '../crypto/random/index.js'; import { BufferReader } from '../serialize/buffer_reader.js'; const ZERO_BUFFER = Buffer.alloc(32); diff --git a/yarn-project/foundation/src/index.ts b/yarn-project/foundation/src/index.ts index c75ef10e563..7e06583f10d 100644 --- a/yarn-project/foundation/src/index.ts +++ b/yarn-project/foundation/src/index.ts @@ -21,6 +21,7 @@ export * as serialize from './serialize/index.js'; export * as sleep from './sleep/index.js'; export * as timer from './timer/index.js'; export * as transport from './transport/index.js'; +export * as trees from './trees/index.js'; export * as types from './types/index.js'; export * as url from './url/index.js'; export * as wasm from './wasm/index.js'; diff --git a/yarn-project/foundation/src/json-rpc/server/index.ts b/yarn-project/foundation/src/json-rpc/server/index.ts index 8495233b59d..9a32d317b08 100644 --- a/yarn-project/foundation/src/json-rpc/server/index.ts +++ b/yarn-project/foundation/src/json-rpc/server/index.ts @@ -1,2 +1,2 @@ -export { JsonRpcServer } from './json_rpc_server.js'; +export { JsonRpcServer, createStatusRouter } from './json_rpc_server.js'; export { JsonProxy } from './json_proxy.js'; diff --git a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts index 722f0c925f8..64651ea30bf 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts @@ -188,3 +188,16 @@ export class JsonRpcServer { httpServer.listen(port); } } + +/** + * Creates a router for handling a plain status request that will return 200 status when running. + * @param apiPrefix - The prefix to use for all api requests + * @returns - The router for handling status requests. 
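+ * Handy for liveness probes (e.g. container healthchecks) that only need a 200 from GET /status.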
+ */
+export function createStatusRouter(apiPrefix = '') {
+  const router = new Router({ prefix: `${apiPrefix}` });
+  router.get('/status', (ctx: Koa.Context) => {
+    ctx.status = 200;
+  });
+  return router;
+}
diff --git a/yarn-project/foundation/src/trees/index.ts b/yarn-project/foundation/src/trees/index.ts
new file mode 100644
index 00000000000..030a59f2570
--- /dev/null
+++ b/yarn-project/foundation/src/trees/index.ts
@@ -0,0 +1,48 @@
+/**
+ * A leaf of an indexed merkle tree.
+ */
+export interface IndexedTreeLeaf {
+  /**
+   * Returns the key of the leaf. It's used for indexing.
+   */
+  getKey(): bigint;
+  /**
+   * Serializes the leaf into a buffer.
+   */
+  toBuffer(): Buffer;
+  /**
+   * Returns true if the leaf is empty.
+   */
+  isEmpty(): boolean;
+}
+
+/**
+ * Preimage of an indexed merkle tree leaf.
+ */
+export interface IndexedTreeLeafPreimage {
+  /**
+   * Returns the key of the leaf corresponding to this preimage.
+   */
+  getKey(): bigint;
+  /**
+   * Returns the key of the next leaf.
+   */
+  getNextKey(): bigint;
+  /**
+   * Returns the index of the next leaf.
+   */
+  getNextIndex(): bigint;
+
+  /**
+   * Returns the preimage as a leaf.
+   */
+  asLeaf(): IndexedTreeLeaf;
+  /**
+   * Serializes the preimage into a buffer.
+   */
+  toBuffer(): Buffer;
+  /**
+   * Serializes the preimage to an array of buffers for hashing.
+   */
+  toHashInputs(): Buffer[];
+}
diff --git a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh
index 1c4bd5e2ba9..37a07c35e17 100755
--- a/yarn-project/l1-artifacts/scripts/generate-artifacts.sh
+++ b/yarn-project/l1-artifacts/scripts/generate-artifacts.sh
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -euo pipefail;

 target_dir=./generated
diff --git a/yarn-project/merkle-tree/package.json b/yarn-project/merkle-tree/package.json
index 6418d219963..4c7b53d8f42 100644
--- a/yarn-project/merkle-tree/package.json
+++ b/yarn-project/merkle-tree/package.json
@@ -40,6 +40,7 @@
     "tslib": "^2.4.0"
   },
   "devDependencies": {
+    "@aztec/circuits.js": "workspace:^",
     "@jest/globals": "^29.5.0",
     "@types/jest": "^29.5.0",
     "@types/levelup": "^5.1.2",
diff --git a/yarn-project/merkle-tree/src/index.ts b/yarn-project/merkle-tree/src/index.ts
index 5181cecfc15..68826f44e42 100644
--- a/yarn-project/merkle-tree/src/index.ts
+++ b/yarn-project/merkle-tree/src/index.ts
@@ -4,7 +4,7 @@ export * from './interfaces/merkle_tree.js';
 export * from './interfaces/update_only_tree.js';
 export * from './pedersen.js';
 export * from './sparse_tree/sparse_tree.js';
-export { LowLeafWitnessData, StandardIndexedTree } from './standard_indexed_tree/standard_indexed_tree.js';
+export { StandardIndexedTree } from './standard_indexed_tree/standard_indexed_tree.js';
 export * from './standard_tree/standard_tree.js';
 export { INITIAL_LEAF } from './tree_base.js';
 export { newTree } from './new_tree.js';
diff --git a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts
index 46c13f49bd9..eee22a3ee2a 100644
--- a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts
+++ b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts
@@ -1,8 +1,26 @@
-import { LeafData, SiblingPath } from '@aztec/types';
+import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees';
+import { SiblingPath } from '@aztec/types';

-import { LowLeafWitnessData } from '../index.js';
 import { AppendOnlyTree } from './append_only_tree.js';

+/**
+ * All of the data to be returned during batch insertion.
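+ * It bundles the low leaf's preimage, sibling path and index, used to prove the new key is not already in the tree.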
+ */ +export interface LowLeafWitnessData { + /** + * Preimage of the low nullifier that proves non membership. + */ + leafPreimage: IndexedTreeLeafPreimage; + /** + * Sibling path to prove membership of low nullifier. + */ + siblingPath: SiblingPath; + /** + * The index of low nullifier. + */ + index: bigint; +} + /** * The result of a batch insertion in an indexed merkle tree. */ @@ -35,27 +53,30 @@ export interface IndexedTree extends AppendOnlyTree { * @param includeUncommitted - If true, the uncommitted changes are included in the search. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousValue( + findIndexOfPreviousKey( newValue: bigint, includeUncommitted: boolean, - ): { - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }; + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + >; /** - * Gets the latest LeafData copy. - * @param index - Index of the leaf of which to obtain the LeafData copy. + * Gets the latest LeafPreimage copy. + * @param index - Index of the leaf of which to obtain the LeafPreimage copy. * @param includeUncommitted - If true, the uncommitted changes are included in the search. - * @returns A copy of the leaf data at the given index or undefined if the leaf was not found. + * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found. */ - getLatestLeafDataCopy(index: number, includeUncommitted: boolean): LeafData | undefined; + getLatestLeafPreimageCopy(index: bigint, includeUncommitted: boolean): Promise; /** * Batch insert multiple leaves into the tree. diff --git a/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts b/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts index e4f65b326a2..ba3ffb4309b 100644 --- a/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts @@ -49,4 +49,12 @@ export interface MerkleTree extends SiblingPathSource { * @param includeUncommitted - Set to true to include uncommitted updates in the data set. */ getLeafValue(index: bigint, includeUncommitted: boolean): Promise; + + /** + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param leaf - The leaf value to look for. + * @param includeUncommitted - Indicates whether to include uncommitted data. + * @returns The index of the first leaf found with a given value (undefined if not found). + */ + findLeafIndex(leaf: Buffer, includeUncommitted: boolean): Promise; } diff --git a/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts b/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts index 6bd5c024d0c..06ce3a24096 100644 --- a/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts @@ -1,5 +1,3 @@ -import { LeafData } from '@aztec/types'; - import { TreeSnapshotBuilder } from '../snapshots/snapshot_builder.js'; import { MerkleTree } from './merkle_tree.js'; @@ -12,6 +10,5 @@ export interface UpdateOnlyTree extends MerkleTree, TreeSnapshotBuilder { * @param leaf - The leaf value to be updated. * @param index - The leaf to be updated. 
*/ - // TODO: Make this strictly a Buffer - updateLeaf(leaf: Buffer | LeafData, index: bigint): Promise; + updateLeaf(leaf: Buffer, index: bigint): Promise; } diff --git a/yarn-project/merkle-tree/src/load_tree.ts b/yarn-project/merkle-tree/src/load_tree.ts index baabe852735..9753a2d528d 100644 --- a/yarn-project/merkle-tree/src/load_tree.ts +++ b/yarn-project/merkle-tree/src/load_tree.ts @@ -13,14 +13,14 @@ import { TreeBase, decodeMeta } from './tree_base.js'; * @returns The newly created tree. */ export async function loadTree( - c: new (...args: any[]) => T, + c: new (db: LevelUp, hasher: Hasher, name: string, depth: number, size: bigint, root: Buffer) => T, db: LevelUp, hasher: Hasher, name: string, ): Promise { const meta: Buffer = await db.get(name); const { root, depth, size } = decodeMeta(meta); + const tree = new c(db, hasher, name, depth, size, root); - await tree.initFromDb(); return tree; } diff --git a/yarn-project/merkle-tree/src/new_tree.ts b/yarn-project/merkle-tree/src/new_tree.ts index f1cd4c2d3b5..1395d012d25 100644 --- a/yarn-project/merkle-tree/src/new_tree.ts +++ b/yarn-project/merkle-tree/src/new_tree.ts @@ -15,14 +15,14 @@ import { TreeBase } from './tree_base.js'; * @returns The newly created tree. */ export async function newTree( - c: new (...args: any[]) => T, + c: new (db: LevelUp, hasher: Hasher, name: string, depth: number, size: bigint) => T, db: LevelUp, hasher: Hasher, name: string, depth: number, prefilledSize = 1, ): Promise { - const tree = new c(db, hasher, name, depth, 0n, undefined); + const tree = new c(db, hasher, name, depth, 0n); await tree.init(prefilledSize); return tree; } diff --git a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts index b530e981b27..46361bd5913 100644 --- a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts @@ -229,4 +229,15 @@ class AppendOnlySnapshot implements TreeSnapshot { return undefined; } } + + async findLeafIndex(value: Buffer): Promise { + const numLeaves = this.getNumLeaves(); + for (let i = 0n; i < numLeaves; i++) { + const currentValue = await this.getLeafValue(i); + if (currentValue && currentValue.equals(value)) { + return i; + } + } + return undefined; + } } diff --git a/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts index d77204beafa..b1157cf9d44 100644 --- a/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts @@ -65,7 +65,7 @@ export abstract class BaseFullTreeSnapshotBuilder depth) { // short circuit if we've reached the leaf level // otherwise getNode might throw if we ask for the children of a leaf - this.handleLeaf(i, node, batch); + await this.handleLeaf(i, node, batch); continue; } @@ -98,7 +98,7 @@ export abstract class BaseFullTreeSnapshotBuilder { @@ -218,4 +218,15 @@ export class BaseFullTreeSnapshot implements TreeSnapshot { path.reverse(); return path; } + + async findLeafIndex(value: Buffer): Promise { + const numLeaves = this.getNumLeaves(); + for (let i = 0n; i < numLeaves; i++) { + const currentValue = await this.getLeafValue(i); + if (currentValue && currentValue.equals(value)) { + return i; + } + } + return undefined; + } } diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts index 
631531fcfc2..3846bbcc21d 100644
--- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts
+++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts
@@ -1,3 +1,6 @@
+import { Fr, NullifierLeaf, NullifierLeafPreimage } from '@aztec/circuits.js';
+import { Hasher } from '@aztec/types';
+
 import levelup, { LevelUp } from 'levelup';

 import { Pedersen, newTree } from '../index.js';
@@ -6,6 +9,12 @@ import { createMemDown } from '../test/utils/create_mem_down.js';
 import { IndexedTreeSnapshotBuilder } from './indexed_tree_snapshot.js';
 import { describeSnapshotBuilderTestSuite } from './snapshot_builder_test_suite.js';

+class NullifierTree extends StandardIndexedTreeWithAppend {
+  constructor(db: levelup.LevelUp, hasher: Hasher, name: string, depth: number, size: bigint = 0n, root?: Buffer) {
+    super(db, hasher, name, depth, size, NullifierLeafPreimage, NullifierLeaf, root);
+  }
+}
+
 describe('IndexedTreeSnapshotBuilder', () => {
   let db: LevelUp;
   let tree: StandardIndexedTreeWithAppend;
@@ -13,15 +22,15 @@
   beforeEach(async () => {
     db = levelup(createMemDown());
-    tree = await newTree(StandardIndexedTreeWithAppend, db, new Pedersen(), 'test', 4);
-    snapshotBuilder = new IndexedTreeSnapshotBuilder(db, tree);
+    tree = await newTree(NullifierTree, db, new Pedersen(), 'test', 4);
+    snapshotBuilder = new IndexedTreeSnapshotBuilder(db, tree, NullifierLeafPreimage);
   });

   describeSnapshotBuilderTestSuite(
     () => tree,
     () => snapshotBuilder,
     async () => {
-      const newLeaves = Array.from({ length: 2 }).map(() => Buffer.from(Math.random().toString()));
+      const newLeaves = Array.from({ length: 2 }).map(() => new NullifierLeaf(Fr.random()).toBuffer());
       await tree.appendLeaves(newLeaves);
     },
   );
@@ -31,14 +40,14 @@
     await tree.appendLeaves([Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]);
     await tree.commit();
     const expectedLeavesAtBlock1 = await Promise.all([
-      tree.getLatestLeafDataCopy(0, false),
-      tree.getLatestLeafDataCopy(1, false),
-      tree.getLatestLeafDataCopy(2, false),
+      tree.getLatestLeafPreimageCopy(0n, false),
+      tree.getLatestLeafPreimageCopy(1n, false),
+      tree.getLatestLeafPreimageCopy(2n, false),
       // I'd expect these to be undefined, but leaf 3 isn't?
// must be some indexed-tree quirk I don't quite understand yet - tree.getLatestLeafDataCopy(3, false), - tree.getLatestLeafDataCopy(4, false), - tree.getLatestLeafDataCopy(5, false), + tree.getLatestLeafPreimageCopy(3n, false), + tree.getLatestLeafPreimageCopy(4n, false), + tree.getLatestLeafPreimageCopy(5n, false), ]); await snapshotBuilder.snapshot(1); @@ -46,35 +55,35 @@ describe('IndexedTreeSnapshotBuilder', () => { await tree.appendLeaves([Buffer.from('d'), Buffer.from('e'), Buffer.from('f')]); await tree.commit(); const expectedLeavesAtBlock2 = await Promise.all([ - tree.getLatestLeafDataCopy(0, false), - tree.getLatestLeafDataCopy(1, false), - tree.getLatestLeafDataCopy(2, false), - tree.getLatestLeafDataCopy(3, false), - tree.getLatestLeafDataCopy(4, false), - tree.getLatestLeafDataCopy(5, false), + tree.getLatestLeafPreimageCopy(0n, false), + tree.getLatestLeafPreimageCopy(1n, false), + tree.getLatestLeafPreimageCopy(2n, false), + tree.getLatestLeafPreimageCopy(3n, false), + tree.getLatestLeafPreimageCopy(4n, false), + tree.getLatestLeafPreimageCopy(5n, false), ]); await snapshotBuilder.snapshot(2); const snapshot1 = await snapshotBuilder.getSnapshot(1); const actualLeavesAtBlock1 = await Promise.all([ - snapshot1.getLatestLeafDataCopy(0n), - snapshot1.getLatestLeafDataCopy(1n), - snapshot1.getLatestLeafDataCopy(2n), - snapshot1.getLatestLeafDataCopy(3n), - snapshot1.getLatestLeafDataCopy(4n), - snapshot1.getLatestLeafDataCopy(5n), + snapshot1.getLatestLeafPreimageCopy(0n), + snapshot1.getLatestLeafPreimageCopy(1n), + snapshot1.getLatestLeafPreimageCopy(2n), + snapshot1.getLatestLeafPreimageCopy(3n), + snapshot1.getLatestLeafPreimageCopy(4n), + snapshot1.getLatestLeafPreimageCopy(5n), ]); expect(actualLeavesAtBlock1).toEqual(expectedLeavesAtBlock1); const snapshot2 = await snapshotBuilder.getSnapshot(2); const actualLeavesAtBlock2 = await Promise.all([ - snapshot2.getLatestLeafDataCopy(0n), - snapshot2.getLatestLeafDataCopy(1n), - snapshot2.getLatestLeafDataCopy(2n), - snapshot2.getLatestLeafDataCopy(3n), - snapshot2.getLatestLeafDataCopy(4n), - snapshot2.getLatestLeafDataCopy(5n), + snapshot2.getLatestLeafPreimageCopy(0n), + snapshot2.getLatestLeafPreimageCopy(1n), + snapshot2.getLatestLeafPreimageCopy(2n), + snapshot2.getLatestLeafPreimageCopy(3n), + snapshot2.getLatestLeafPreimageCopy(4n), + snapshot2.getLatestLeafPreimageCopy(5n), ]); expect(actualLeavesAtBlock2).toEqual(expectedLeavesAtBlock2); }); @@ -85,12 +94,12 @@ describe('IndexedTreeSnapshotBuilder', () => { await tree.appendLeaves([Buffer.from('a'), Buffer.from('f'), Buffer.from('d')]); await tree.commit(); const snapshot = await snapshotBuilder.snapshot(1); - const historicalPrevValue = tree.findIndexOfPreviousValue(2n, false); + const historicalPrevValue = await tree.findIndexOfPreviousKey(2n, false); await tree.appendLeaves([Buffer.from('c'), Buffer.from('b'), Buffer.from('e')]); await tree.commit(); - await expect(snapshot.findIndexOfPreviousValue(2n)).resolves.toEqual(historicalPrevValue); + await expect(snapshot.findIndexOfPreviousKey(2n)).resolves.toEqual(historicalPrevValue); }); }); }); diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts index 6725bd394e5..28aeefdc953 100644 --- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts @@ -1,10 +1,9 @@ -import { toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { LeafData } from 
'@aztec/types'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { LevelUp, LevelUpChain } from 'levelup'; import { IndexedTree } from '../interfaces/indexed_tree.js'; -import { decodeTreeValue, encodeTreeValue } from '../standard_indexed_tree/standard_indexed_tree.js'; +import { PreimageFactory } from '../standard_indexed_tree/standard_indexed_tree.js'; import { TreeBase } from '../tree_base.js'; import { BaseFullTreeSnapshot, BaseFullTreeSnapshotBuilder } from './base_full_snapshot.js'; import { IndexedTreeSnapshot, TreeSnapshotBuilder } from './snapshot_builder.js'; @@ -17,44 +16,54 @@ export class IndexedTreeSnapshotBuilder extends BaseFullTreeSnapshotBuilder implements TreeSnapshotBuilder { - constructor(db: LevelUp, tree: IndexedTree & TreeBase) { + constructor(db: LevelUp, tree: IndexedTree & TreeBase, private leafPreimageBuilder: PreimageFactory) { super(db, tree); } protected openSnapshot(root: Buffer, numLeaves: bigint): IndexedTreeSnapshot { - return new IndexedTreeSnapshotImpl(this.db, root, numLeaves, this.tree); + return new IndexedTreeSnapshotImpl(this.db, root, numLeaves, this.tree, this.leafPreimageBuilder); } - protected handleLeaf(index: bigint, node: Buffer, batch: LevelUpChain) { - const leafData = this.tree.getLatestLeafDataCopy(Number(index), false); - if (leafData) { - batch.put(snapshotLeafValue(node, index), encodeTreeValue(leafData)); + protected async handleLeaf(index: bigint, node: Buffer, batch: LevelUpChain) { + const leafPreimage = await this.tree.getLatestLeafPreimageCopy(index, false); + if (leafPreimage) { + batch.put(snapshotLeafValue(node, index), leafPreimage.toBuffer()); } } } /** A snapshot of an indexed tree at a particular point in time */ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements IndexedTreeSnapshot { + constructor( + db: LevelUp, + historicRoot: Buffer, + numLeaves: bigint, + tree: IndexedTree & TreeBase, + private leafPreimageBuilder: PreimageFactory, + ) { + super(db, historicRoot, numLeaves, tree); + } + async getLeafValue(index: bigint): Promise { - const leafData = await this.getLatestLeafDataCopy(index); - return leafData ? toBufferBE(leafData.value, 32) : undefined; + const leafPreimage = await this.getLatestLeafPreimageCopy(index); + return leafPreimage?.toBuffer(); } - async getLatestLeafDataCopy(index: bigint): Promise { + async getLatestLeafPreimageCopy(index: bigint): Promise { const leafNode = await super.getLeafValue(index); const leafValue = await this.db.get(snapshotLeafValue(leafNode!, index)).catch(() => undefined); if (leafValue) { - return decodeTreeValue(leafValue); + return this.leafPreimageBuilder.fromBuffer(leafValue); } else { return undefined; } } - async findIndexOfPreviousValue(newValue: bigint): Promise<{ + async findIndexOfPreviousKey(newValue: bigint): Promise<{ /** * The index of the found leaf. */ - index: number; + index: bigint; /** * A flag indicating if the corresponding leaf's value is equal to `newValue`. 
*/ @@ -65,18 +74,18 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements IndexedTre for (let i = 0; i < numLeaves; i++) { // this is very inefficient - const storedLeaf = await this.getLatestLeafDataCopy(BigInt(i))!; + const storedLeaf = await this.getLatestLeafPreimageCopy(BigInt(i))!; // The stored leaf can be undefined if it addresses an empty leaf // If the leaf is empty we do the same as if the leaf was larger if (storedLeaf === undefined) { diff.push(newValue); - } else if (storedLeaf.value > newValue) { + } else if (storedLeaf.getKey() > newValue) { diff.push(newValue); - } else if (storedLeaf.value === newValue) { - return { index: i, alreadyPresent: true }; + } else if (storedLeaf.getKey() === newValue) { + return { index: BigInt(i), alreadyPresent: true }; } else { - diff.push(newValue - storedLeaf.value); + diff.push(newValue - storedLeaf.getKey()); } } @@ -87,6 +96,13 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements IndexedTre } } - return { index: minIndex, alreadyPresent: false }; + return { index: BigInt(minIndex), alreadyPresent: false }; + } + + async findLeafIndex(value: Buffer): Promise<bigint | undefined> { + const index = await this.tree.findLeafIndex(value, false); + if (index !== undefined && index < this.getNumLeaves()) { + return index; + } } } diff --git a/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts b/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts index a6722306301..b1fd74f9bdc 100644 --- a/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts +++ b/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts @@ -1,4 +1,5 @@ -import { LeafData, SiblingPath } from '@aztec/types'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; +import { SiblingPath } from '@aztec/types'; /** * An interface for a tree that can record snapshots of its contents. */ @@ -47,6 +48,13 @@ export interface TreeSnapshot { * @param index - The index of the leaf for which a sibling path is required. */ getSiblingPath<N extends number>(index: bigint): Promise<SiblingPath<N>>; + + /** + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param value - The leaf value to look for. + * @returns The index of the first leaf found with a given value (undefined if not found). + */ + findLeafIndex(value: Buffer): Promise<bigint | undefined>; } /** A snapshot of an indexed tree */ @@ -55,18 +64,18 @@ export interface IndexedTreeSnapshot extends TreeSnapshot { * Gets the historical data for a leaf * @param index - The index of the leaf to get the data for */ - getLatestLeafDataCopy(index: bigint): Promise<LeafData | undefined>; + getLatestLeafPreimageCopy(index: bigint): Promise<IndexedTreeLeafPreimage | undefined>; /** * Finds the index of the largest leaf whose value is less than or equal to the provided value. * @param newValue - The new value to be inserted into the tree. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousValue(newValue: bigint): Promise<{ + findIndexOfPreviousKey(newValue: bigint): Promise<{ /** * The index of the found leaf. */ - index: number; + index: bigint; /** * A flag indicating if the corresponding leaf's value is equal to `newValue`.
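Both the snapshot scan above and the interface it implements now report the low-leaf index as a `bigint` together with an `alreadyPresent` flag. A self-contained sketch of the same "largest key less than or equal to the new key" rule over an in-memory list; illustrative only, with a hypothetical `Preimage` shape standing in for `IndexedTreeLeafPreimage`, and returning `undefined` when no low leaf exists, as the new tree-side query does:

```typescript
// Illustrative only: the predecessor rule run over an in-memory list.
interface Preimage {
  getKey(): bigint;
}

function findIndexOfPreviousKey(
  leaves: (Preimage | undefined)[],
  newKey: bigint,
): { index: bigint; alreadyPresent: boolean } | undefined {
  let best: { index: bigint; key: bigint } | undefined;
  for (let i = 0; i < leaves.length; i++) {
    const key = leaves[i]?.getKey();
    if (key === undefined || key > newKey) {
      continue; // empty leaf, or key too large: cannot be the low leaf
    }
    if (key === newKey) {
      return { index: BigInt(i), alreadyPresent: true };
    }
    if (best === undefined || key > best.key) {
      best = { index: BigInt(i), key }; // closest key below newKey so far
    }
  }
  return best && { index: best.index, alreadyPresent: false };
}
```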
*/ diff --git a/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts b/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts index 3b66c36164c..f50ff1d69ae 100644 --- a/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts +++ b/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts @@ -186,6 +186,7 @@ export function describeSnapshotBuilderTestSuite { + it('returns the historical leaf index when the snapshot was taken', async () => { + await modifyTree(tree); + await tree.commit(); + const snapshot = await snapshotBuilder.snapshot(1); + + const initialLastLeafIndex = tree.getNumLeaves(false) - 1n; + let lastLeaf = await tree.getLeafValue(initialLastLeafIndex, false); + expect(await snapshot.findLeafIndex(lastLeaf!)).toBe(initialLastLeafIndex); + + await modifyTree(tree); + await tree.commit(); + + const newLastLeafIndex = tree.getNumLeaves(false) - 1n; + lastLeaf = await tree.getLeafValue(newLastLeafIndex, false); + + expect(await snapshot.findLeafIndex(lastLeaf!)).toBe(undefined); + }); + }); }); } diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts index 92cdc4152fc..138ca8f21e7 100644 --- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts +++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts @@ -41,4 +41,8 @@ export class SparseTree extends TreeBase implements UpdateOnlyTree { public getSnapshot(block: number): Promise { return this.#snapshotBuilder.getSnapshot(block); } + + public findLeafIndex(_value: Buffer, _includeUncommitted: boolean): Promise { + throw new Error('Finding leaf index is not supported for sparse trees'); + } } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts index ebbf3a3d0ee..325b438a0f1 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts @@ -1,87 +1,107 @@ import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; -import { LeafData, SiblingPath } from '@aztec/types'; - -import { BatchInsertionResult, IndexedTree } from '../interfaces/indexed_tree.js'; -import { IndexedTreeSnapshotBuilder } from '../snapshots/indexed_tree_snapshot.js'; -import { IndexedTreeSnapshot } from '../snapshots/snapshot_builder.js'; +import { IndexedTreeLeaf, IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; +import { Hasher, SiblingPath } from '@aztec/types'; + +import { LevelUp } from 'levelup'; + +import { + BatchInsertionResult, + IndexedTree, + IndexedTreeSnapshot, + IndexedTreeSnapshotBuilder, + LowLeafWitnessData, +} from '../index.js'; import { TreeBase } from '../tree_base.js'; const log = createDebugLogger('aztec:standard-indexed-tree'); -const indexToKeyLeaf = (name: string, index: bigint) => { - return `${name}:leaf:${toBufferBE(index, 32).toString('hex')}`; -}; - -const keyLeafToIndex = (key: string): bigint => { - const index = key.split(':')[2]; - return toBigIntBE(Buffer.from(index, 'hex')); -}; - -const zeroLeaf: LeafData = { - value: 0n, - nextValue: 0n, - nextIndex: 0n, -}; - /** - * All of the data to be return during batch insertion. + * Factory for creating leaf preimages. 
*/ -export interface LowLeafWitnessData<N extends number> { +export interface PreimageFactory { + /** + * Creates a new preimage from a leaf. + * @param leaf - Leaf to create a preimage from. + * @param nextKey - Next key of the leaf. + * @param nextIndex - Next index of the leaf. + */ + fromLeaf(leaf: IndexedTreeLeaf, nextKey: bigint, nextIndex: bigint): IndexedTreeLeafPreimage; + /** + * Creates a new preimage from a buffer. + * @param buffer - Buffer to create a preimage from. + */ + fromBuffer(buffer: Buffer): IndexedTreeLeafPreimage; /** - * Preimage of the low nullifier that proves non membership. + * Creates an empty preimage. */ - leafData: LeafData; + empty(): IndexedTreeLeafPreimage; /** - * Sibling path to prove membership of low nullifier. + * Creates a copy of a preimage. + * @param preimage - Preimage to be cloned. */ - siblingPath: SiblingPath<N>; + clone(preimage: IndexedTreeLeafPreimage): IndexedTreeLeafPreimage; +} + +/** + * Factory for creating leaves. + */ +export interface LeafFactory { + /** + * Creates a dummy leaf holding the given key. + * @param key - Key of the leaf. + */ + buildDummy(key: bigint): IndexedTreeLeaf; /** - * The index of low nullifier. + * Creates a new leaf from a buffer. + * @param buffer - Buffer to create a leaf from. */ - index: bigint; + fromBuffer(buffer: Buffer): IndexedTreeLeaf; } +export const buildDbKeyForPreimage = (name: string, index: bigint) => { + return `${name}:leaf_by_index:${toBufferBE(index, 32).toString('hex')}`; +}; + +export const buildDbKeyForLeafIndex = (name: string, key: bigint) => { + return `${name}:leaf_index_by_leaf_key:${toBufferBE(key, 32).toString('hex')}`; +}; + /** * Pre-compute empty witness. * @param treeHeight - Height of tree for sibling path. * @returns An empty witness. */ -function getEmptyLowLeafWitness<N extends number>(treeHeight: N): LowLeafWitnessData<N> { +function getEmptyLowLeafWitness<N extends number>( + treeHeight: N, + leafPreimageFactory: PreimageFactory, +): LowLeafWitnessData<N> { return { - leafData: zeroLeaf, + leafPreimage: leafPreimageFactory.empty(), index: 0n, siblingPath: new SiblingPath<N>(treeHeight, Array(treeHeight).fill(toBufferBE(0n, 32))), }; } -export const encodeTreeValue = (leafData: LeafData) => { - const valueAsBuffer = toBufferBE(leafData.value, 32); - const indexAsBuffer = toBufferBE(leafData.nextIndex, 32); - const nextValueAsBuffer = toBufferBE(leafData.nextValue, 32); - return Buffer.concat([valueAsBuffer, indexAsBuffer, nextValueAsBuffer]); -}; - -export const decodeTreeValue = (buf: Buffer) => { - const value = toBigIntBE(buf.subarray(0, 32)); - const nextIndex = toBigIntBE(buf.subarray(32, 64)); - const nextValue = toBigIntBE(buf.subarray(64, 96)); - return { - value, - nextIndex, - nextValue, - } as LeafData; -}; - /** - * Indexed merkle tree. + * Standard implementation of an indexed tree. */ export class StandardIndexedTree extends TreeBase implements IndexedTree { - #snapshotBuilder = new IndexedTreeSnapshotBuilder(this.db, this); - - protected leaves: LeafData[] = []; - protected cachedLeaves: { [key: number]: LeafData } = {}; + #snapshotBuilder = new IndexedTreeSnapshotBuilder(this.db, this, this.leafPreimageFactory); + protected cachedLeafPreimages: { [key: string]: IndexedTreeLeafPreimage } = {}; + + public constructor( + db: LevelUp, + hasher: Hasher, + name: string, + depth: number, + size: bigint = 0n, + protected leafPreimageFactory: PreimageFactory, + protected leafFactory: LeafFactory, + root?: Buffer, + ) { + super(db, hasher, name, depth, size, root); + } /** * Appends the given leaves to the tree.
@@ -89,7 +109,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * @returns Empty promise. * @remarks Use batchInsert method instead. */ - public appendLeaves(_leaves: Buffer[]): Promise { + appendLeaves(_leaves: Buffer[]): Promise { throw new Error('Not implemented'); } @@ -117,88 +137,149 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * @param includeUncommitted - Indicates whether to include uncommitted leaves in the computation. * @returns The value of the leaf at the given index or undefined if the leaf is empty. */ - public getLeafValue(index: bigint, includeUncommitted: boolean): Promise { - const leaf = this.getLatestLeafDataCopy(Number(index), includeUncommitted); - if (!leaf) { - return Promise.resolve(undefined); - } - return Promise.resolve(toBufferBE(leaf.value, 32)); + public async getLeafValue(index: bigint, includeUncommitted: boolean): Promise { + const preimage = await this.getLatestLeafPreimageCopy(index, includeUncommitted); + return preimage && preimage.toBuffer(); } /** * Finds the index of the largest leaf whose value is less than or equal to the provided value. - * @param newValue - The new value to be inserted into the tree. + * @param newKey - The new key to be inserted into the tree. * @param includeUncommitted - If true, the uncommitted changes are included in the search. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousValue( - newValue: bigint, + async findIndexOfPreviousKey( + newKey: bigint, includeUncommitted: boolean, - ): { - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - } { - const numLeaves = this.getNumLeaves(includeUncommitted); - const diff: bigint[] = []; - - for (let i = 0; i < numLeaves; i++) { - const storedLeaf = this.getLatestLeafDataCopy(i, includeUncommitted)!; - - // The stored leaf can be undefined if it addresses an empty leaf - // If the leaf is empty we do the same as if the leaf was larger - if (storedLeaf === undefined) { - diff.push(newValue); - } else if (storedLeaf.value > newValue) { - diff.push(newValue); - } else if (storedLeaf.value === newValue) { - return { index: i, alreadyPresent: true }; - } else { - diff.push(newValue - storedLeaf.value); + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { + let lowLeafIndex = await this.getDbLowLeafIndex(newKey); + let lowLeafPreimage = lowLeafIndex !== undefined ? 
await this.getDbPreimage(lowLeafIndex) : undefined; + + if (includeUncommitted) { + const cachedLowLeafIndex = this.getCachedLowLeafIndex(newKey); + if (cachedLowLeafIndex !== undefined) { + const cachedLowLeafPreimage = this.getCachedPreimage(cachedLowLeafIndex)!; + if (!lowLeafPreimage || cachedLowLeafPreimage.getKey() > lowLeafPreimage.getKey()) { + lowLeafIndex = cachedLowLeafIndex; + lowLeafPreimage = cachedLowLeafPreimage; + } } } - const minIndex = this.findMinIndex(diff); - return { index: minIndex, alreadyPresent: false }; + + if (lowLeafIndex === undefined || !lowLeafPreimage) { + return undefined; + } + + return { + index: lowLeafIndex, + alreadyPresent: lowLeafPreimage.getKey() === newKey, + }; + } + + private getCachedLowLeafIndex(key: bigint): bigint | undefined { + const indexes = Object.getOwnPropertyNames(this.cachedLeafPreimages); + const lowLeafIndexes = indexes + .map(index => ({ + index: BigInt(index), + key: this.cachedLeafPreimages[index].getKey(), + })) + .filter(({ key: candidateKey }) => candidateKey <= key) + .sort((a, b) => Number(b.key - a.key)); + return lowLeafIndexes[0]?.index; + } + + private getCachedLeafIndex(key: bigint): bigint | undefined { + const index = Object.keys(this.cachedLeafPreimages).find(index => { + return this.cachedLeafPreimages[index].getKey() === key; + }); + if (index) { + return BigInt(index); + } + return undefined; + } + + private async getDbLowLeafIndex(key: bigint): Promise { + return await new Promise((resolve, reject) => { + let lowLeafIndex: bigint | undefined; + this.db + .createReadStream({ + gte: buildDbKeyForLeafIndex(this.getName(), 0n), + lte: buildDbKeyForLeafIndex(this.getName(), key), + limit: 1, + reverse: true, + }) + .on('data', data => { + lowLeafIndex = toBigIntBE(data.value); + }) + .on('close', function () {}) + .on('end', function () { + resolve(lowLeafIndex); + }) + .on('error', function () { + log.error('stream error'); + reject(); + }); + }); + } + + private async getDbPreimage(index: bigint): Promise { + const dbPreimage = await this.db + .get(buildDbKeyForPreimage(this.getName(), index)) + .then(data => this.leafPreimageFactory.fromBuffer(data)) + .catch(() => undefined); + return dbPreimage; + } + + private getCachedPreimage(index: bigint): IndexedTreeLeafPreimage | undefined { + return this.cachedLeafPreimages[index.toString()]; } /** - * Gets the latest LeafData copy. - * @param index - Index of the leaf of which to obtain the LeafData copy. + * Gets the latest LeafPreimage copy. + * @param index - Index of the leaf of which to obtain the LeafPreimage copy. * @param includeUncommitted - If true, the uncommitted changes are included in the search. - * @returns A copy of the leaf data at the given index or undefined if the leaf was not found. + * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found. */ - public getLatestLeafDataCopy(index: number, includeUncommitted: boolean): LeafData | undefined { - const leaf = !includeUncommitted ? this.leaves[index] : this.cachedLeaves[index] ?? this.leaves[index]; - return leaf - ? ({ - value: leaf.value, - nextIndex: leaf.nextIndex, - nextValue: leaf.nextValue, - } as LeafData) - : undefined; + public async getLatestLeafPreimageCopy( + index: bigint, + includeUncommitted: boolean, + ): Promise { + const preimage = !includeUncommitted + ? await this.getDbPreimage(index) + : this.getCachedPreimage(index) ?? 
(await this.getDbPreimage(index)); + return preimage && this.leafPreimageFactory.clone(preimage); } /** - * Finds the index of the minimum value in an array. - * @param values - The collection of values to be searched. - * @returns The index of the minimum value in the array. + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param value - The leaf value to look for. + * @param includeUncommitted - Indicates whether to include uncommitted data. + * @returns The index of the first leaf found with a given value (undefined if not found). */ - private findMinIndex(values: bigint[]) { - if (!values.length) { - return 0; + public async findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise { + const leaf = this.leafFactory.fromBuffer(value); + let index = await this.db + .get(buildDbKeyForLeafIndex(this.getName(), leaf.getKey())) + .then(data => toBigIntBE(data)) + .catch(() => undefined); + + if (includeUncommitted && index === undefined) { + const cachedIndex = this.getCachedLeafIndex(leaf.getKey()); + index = cachedIndex; } - let minIndex = 0; - for (let i = 1; i < values.length; i++) { - if (values[minIndex] > values[i]) { - minIndex = i; - } - } - return minIndex; + return index; } /** @@ -220,66 +301,31 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { throw new Error(`Prefilled size must be at least 1!`); } - const leaves: LeafData[] = []; + const leaves: IndexedTreeLeafPreimage[] = []; for (let i = 0n; i < prefilledSize; i++) { - const newLeaf = { - value: toBigIntBE(Buffer.from([Number(i)])), - nextIndex: i + 1n, - nextValue: i + 1n, - }; - leaves.push(newLeaf); + const newLeaf = this.leafFactory.buildDummy(i); + const newLeafPreimage = this.leafPreimageFactory.fromLeaf(newLeaf, i + 1n, i + 1n); + leaves.push(newLeafPreimage); } - // Make the first leaf have 0 value - leaves[0].value = 0n; - // Make the last leaf point to the first leaf - leaves[prefilledSize - 1].nextIndex = 0n; - leaves[prefilledSize - 1].nextValue = 0n; + leaves[prefilledSize - 1] = this.leafPreimageFactory.fromLeaf(leaves[prefilledSize - 1].asLeaf(), 0n, 0n); await this.encodeAndAppendLeaves(leaves, true); await this.commit(); } - /** - * Loads Merkle tree data from a database and assigns them to this object. - */ - public async initFromDb(): Promise { - const startingIndex = 0n; - const values: LeafData[] = []; - const promise = new Promise((resolve, reject) => { - this.db - .createReadStream({ - gte: indexToKeyLeaf(this.getName(), startingIndex), - lte: indexToKeyLeaf(this.getName(), 2n ** BigInt(this.getDepth())), - }) - .on('data', function (data) { - const index = keyLeafToIndex(data.key.toString('utf-8')); - values[Number(index)] = decodeTreeValue(data.value); - }) - .on('close', function () {}) - .on('end', function () { - resolve(); - }) - .on('error', function () { - log.error('stream error'); - reject(); - }); - }); - await promise; - this.leaves = values; - } - /** * Commits all the leaves to the database and removes them from a cache. 
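Leaf lookups now hit two LevelDB keyspaces per tree: `leaf_by_index` maps an index to the stored preimage, and `leaf_index_by_leaf_key` maps a leaf key back to its index. Because the key component is a fixed-width 32-byte big-endian hex string, lexicographic order coincides with numeric order, which is what lets the low-leaf lookup be a single reverse range read. A sketch of essentially the query `getDbLowLeafIndex` issues above (illustrative, not the production code path):

```typescript
import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer';
import { LevelUp } from 'levelup';

// Predecessor lookup over the `leaf_index_by_leaf_key` records: one reverse
// range read bounded by `lte` yields the index of the largest key <= `key`.
function getLowLeafIndex(db: LevelUp, name: string, key: bigint): Promise<bigint | undefined> {
  const dbKey = (k: bigint) => `${name}:leaf_index_by_leaf_key:${toBufferBE(k, 32).toString('hex')}`;
  return new Promise((resolve, reject) => {
    let result: bigint | undefined;
    db.createReadStream({ gte: dbKey(0n), lte: dbKey(key), limit: 1, reverse: true })
      .on('data', data => (result = toBigIntBE(data.value)))
      .on('end', () => resolve(result)) // undefined if no key at or below `key`
      .on('error', reject);
  });
}
```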
*/ private async commitLeaves(): Promise { const batch = this.db.batch(); - const keys = Object.getOwnPropertyNames(this.cachedLeaves); + const keys = Object.getOwnPropertyNames(this.cachedLeafPreimages); for (const key of keys) { - const index = Number(key); - batch.put(indexToKeyLeaf(this.getName(), BigInt(index)), encodeTreeValue(this.cachedLeaves[index])); - this.leaves[index] = this.cachedLeaves[index]; + const leaf = this.cachedLeafPreimages[key]; + const index = BigInt(key); + batch.put(buildDbKeyForPreimage(this.getName(), index), leaf.toBuffer()); + batch.put(buildDbKeyForLeafIndex(this.getName(), leaf.getKey()), toBufferBE(index, 32)); } await batch.write(); this.clearCachedLeaves(); @@ -289,20 +335,21 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * Clears the cache. */ private clearCachedLeaves() { - this.cachedLeaves = {}; + this.cachedLeafPreimages = {}; } /** * Updates a leaf in the tree. - * @param leaf - New contents of the leaf. + * @param preimage - New contents of the leaf. * @param index - Index of the leaf to be updated. */ - protected async updateLeaf(leaf: LeafData, index: bigint) { + protected async updateLeaf(preimage: IndexedTreeLeafPreimage, index: bigint) { if (index > this.maxIndex) { throw Error(`Index out of bounds. Index ${index}, max index: ${this.maxIndex}.`); } - const encodedLeaf = this.encodeLeaf(leaf, true); + this.cachedLeafPreimages[index.toString()] = preimage; + const encodedLeaf = this.encodeLeaf(preimage, true); await this.addLeafToCacheAndHashToRoot(encodedLeaf, index); const numLeaves = this.getNumLeaves(true); if (index >= numLeaves) { @@ -426,45 +473,45 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { leaves: Buffer[], subtreeHeight: SubtreeHeight, ): Promise> { - const emptyLowLeafWitness = getEmptyLowLeafWitness(this.getDepth() as TreeHeight); + const emptyLowLeafWitness = getEmptyLowLeafWitness(this.getDepth() as TreeHeight, this.leafPreimageFactory); // Accumulators const lowLeavesWitnesses: LowLeafWitnessData[] = leaves.map(() => emptyLowLeafWitness); - const pendingInsertionSubtree: LeafData[] = leaves.map(() => zeroLeaf); + const pendingInsertionSubtree: IndexedTreeLeafPreimage[] = leaves.map(() => this.leafPreimageFactory.empty()); // Start info const startInsertionIndex = this.getNumLeaves(true); - const leavesToInsert = leaves.map(leaf => toBigIntBE(leaf)); + const leavesToInsert = leaves.map(leaf => this.leafFactory.fromBuffer(leaf)); const sortedDescendingLeafTuples = leavesToInsert .map((leaf, index) => ({ leaf, index })) - .sort((a, b) => Number(b.leaf - a.leaf)); + .sort((a, b) => Number(b.leaf.getKey() - a.leaf.getKey())); const sortedDescendingLeaves = sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf); // Get insertion path for each leaf for (let i = 0; i < leavesToInsert.length; i++) { - const newValue = sortedDescendingLeaves[i]; - const originalIndex = leavesToInsert.indexOf(newValue); + const newLeaf = sortedDescendingLeaves[i]; + const originalIndex = leavesToInsert.indexOf(newLeaf); - if (newValue === 0n) { + if (newLeaf.isEmpty()) { continue; } - const indexOfPrevious = this.findIndexOfPreviousValue(newValue, true); - - // get the low leaf - const lowLeaf = this.getLatestLeafDataCopy(indexOfPrevious.index, true); - if (lowLeaf === undefined) { + const indexOfPrevious = await this.findIndexOfPreviousKey(newLeaf.getKey(), true); + if (indexOfPrevious === undefined) { return { lowLeavesWitnessData: undefined, - sortedNewLeaves: 
sortedDescendingLeafTuples.map(leafTuple => new Fr(leafTuple.leaf).toBuffer()), + sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf.toBuffer()), sortedNewLeavesIndexes: sortedDescendingLeafTuples.map(leafTuple => leafTuple.index), newSubtreeSiblingPath: await this.getSubtreeSiblingPath(subtreeHeight, true), }; } + + // get the low leaf (existence checked in getting index) + const lowLeafPreimage = (await this.getLatestLeafPreimageCopy(indexOfPrevious.index, true))!; const siblingPath = await this.getSiblingPath(BigInt(indexOfPrevious.index), true); const witness: LowLeafWitnessData = { - leafData: { ...lowLeaf }, + leafPreimage: lowLeafPreimage, index: BigInt(indexOfPrevious.index), siblingPath, }; @@ -472,20 +519,23 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { // Update the running paths lowLeavesWitnesses[i] = witness; - const currentPendingLeaf: LeafData = { - value: newValue, - nextValue: lowLeaf.nextValue, - nextIndex: lowLeaf.nextIndex, - }; + const currentPendingPreimageLeaf = this.leafPreimageFactory.fromLeaf( + newLeaf, + lowLeafPreimage.getNextKey(), + lowLeafPreimage.getNextIndex(), + ); - pendingInsertionSubtree[originalIndex] = currentPendingLeaf; + pendingInsertionSubtree[originalIndex] = currentPendingPreimageLeaf; - lowLeaf.nextValue = newValue; - lowLeaf.nextIndex = startInsertionIndex + BigInt(originalIndex); + const newLowLeafPreimage = this.leafPreimageFactory.fromLeaf( + lowLeafPreimage.asLeaf(), + newLeaf.getKey(), + startInsertionIndex + BigInt(originalIndex), + ); const lowLeafIndex = indexOfPrevious.index; - this.cachedLeaves[lowLeafIndex] = lowLeaf; - await this.updateLeaf(lowLeaf, BigInt(lowLeafIndex)); + this.cachedLeafPreimages[lowLeafIndex.toString()] = newLowLeafPreimage; + await this.updateLeaf(newLowLeafPreimage, lowLeafIndex); } const newSubtreeSiblingPath = await this.getSubtreeSiblingPath( @@ -500,7 +550,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { return { lowLeavesWitnessData: lowLeavesWitnesses, - sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => Buffer.from(new Fr(leafTuple.leaf).toBuffer())), + sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf.toBuffer()), sortedNewLeavesIndexes: sortedDescendingLeafTuples.map(leafTuple => leafTuple.index), newSubtreeSiblingPath, }; @@ -527,19 +577,19 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { /** * Encodes leaves and appends them to a tree. - * @param leaves - Leaves to encode. + * @param preimages - Leaves to encode. * @param hash0Leaf - Indicates whether 0 value leaf should be hashed. See {@link encodeLeaf}. 
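Each successful insertion splices the new key into the sorted linked list the preimages encode: the new leaf inherits the low leaf's old pointers, and the low leaf is rewritten to point at the new leaf. A minimal illustration of that pointer update, with hypothetical plain objects standing in for preimages:

```typescript
type Preimage = { key: bigint; nextKey: bigint; nextIndex: bigint };

// Splice `newKey` in after its low leaf, mirroring the update above.
function splice(leaves: Preimage[], lowIndex: number, newKey: bigint): void {
  const low = leaves[lowIndex];
  const newIndex = BigInt(leaves.length);
  // The new leaf takes over the low leaf's old pointers...
  leaves.push({ key: newKey, nextKey: low.nextKey, nextIndex: low.nextIndex });
  // ...and the low leaf now points at the new leaf.
  low.nextKey = newKey;
  low.nextIndex = newIndex;
}

const leaves: Preimage[] = [
  { key: 0n, nextKey: 10n, nextIndex: 1n },
  { key: 10n, nextKey: 30n, nextIndex: 2n },
  { key: 30n, nextKey: 0n, nextIndex: 0n },
];
splice(leaves, 1, 20n); // low leaf of 20 is the leaf with key 10
// leaves[1] is now { key: 10n, nextKey: 20n, nextIndex: 3n }
```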
* @returns Empty promise */ - private async encodeAndAppendLeaves(leaves: LeafData[], hash0Leaf: boolean): Promise { - const startInsertionIndex = Number(this.getNumLeaves(true)); + private async encodeAndAppendLeaves(preimages: IndexedTreeLeafPreimage[], hash0Leaf: boolean): Promise { + const startInsertionIndex = this.getNumLeaves(true); - const serializedLeaves = leaves.map((leaf, i) => { - this.cachedLeaves[startInsertionIndex + i] = leaf; - return this.encodeLeaf(leaf, hash0Leaf); + const hashedLeaves = preimages.map((preimage, i) => { + this.cachedLeafPreimages[(startInsertionIndex + BigInt(i)).toString()] = preimage; + return this.encodeLeaf(preimage, hash0Leaf); }); - await super.appendLeaves(serializedLeaves); + await super.appendLeaves(hashedLeaves); } /** @@ -550,14 +600,12 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * nullifier it is improbable that a valid nullifier would be 0. * @returns Leaf encoded in a buffer. */ - private encodeLeaf(leaf: LeafData, hash0Leaf: boolean): Buffer { + private encodeLeaf(leaf: IndexedTreeLeafPreimage, hash0Leaf: boolean): Buffer { let encodedLeaf; - if (!hash0Leaf && leaf.value == 0n) { + if (!hash0Leaf && leaf.getKey() == 0n) { encodedLeaf = toBufferBE(0n, 32); } else { - encodedLeaf = this.hasher.hashInputs( - [leaf.value, leaf.nextIndex, leaf.nextValue].map(val => toBufferBE(val, 32)), - ); + encodedLeaf = this.hasher.hashInputs(leaf.toHashInputs()); } return encodedLeaf; } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts index 2f6db0b1ac1..9ebc8c30472 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts @@ -1,3 +1,4 @@ +import { Fr, NullifierLeaf, NullifierLeafPreimage } from '@aztec/circuits.js'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { Hasher, SiblingPath } from '@aztec/types'; @@ -8,16 +9,22 @@ import { treeTestSuite } from '../../test/test_suite.js'; import { createMemDown } from '../../test/utils/create_mem_down.js'; import { StandardIndexedTreeWithAppend } from './standard_indexed_tree_with_append.js'; +class NullifierTree extends StandardIndexedTreeWithAppend { + constructor(db: levelup.LevelUp, hasher: Hasher, name: string, depth: number, size: bigint = 0n, root?: Buffer) { + super(db, hasher, name, depth, size, NullifierLeafPreimage, NullifierLeaf, root); + } +} + const createDb = async (levelUp: levelup.LevelUp, hasher: Hasher, name: string, depth: number, prefilledSize = 1) => { - return await newTree(StandardIndexedTreeWithAppend, levelUp, hasher, name, depth, prefilledSize); + return await newTree(NullifierTree, levelUp, hasher, name, depth, prefilledSize); }; const createFromName = async (levelUp: levelup.LevelUp, hasher: Hasher, name: string) => { - return await loadTree(StandardIndexedTreeWithAppend, levelUp, hasher, name); + return await loadTree(NullifierTree, levelUp, hasher, name); }; -const createIndexedTreeLeaf = (value: number, nextIndex: number, nextValue: number) => { - return [toBufferBE(BigInt(value), 32), toBufferBE(BigInt(nextIndex), 32), toBufferBE(BigInt(nextValue), 32)]; +const createIndexedTreeLeafHashInputs = (value: number, nextIndex: number, nextValue: number) => { + return new NullifierLeafPreimage(new Fr(value), new Fr(nextValue), BigInt(nextIndex)).toHashInputs(); }; 
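With `encodeLeaf` delegating to `leaf.toHashInputs()`, the hash layout lives with the preimage type rather than the tree. A minimal preimage satisfying that part of the contract, assuming the same three-field, 32-byte big-endian layout the removed `encodeTreeValue`/`decodeTreeValue` pair used; the production trees bind `NullifierLeafPreimage` and friends from `@aztec/circuits.js` instead:

```typescript
import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer';

// Hypothetical minimal preimage, for illustration only.
class SimpleLeafPreimage {
  constructor(readonly value: bigint, readonly nextValue: bigint, readonly nextIndex: bigint) {}

  getKey(): bigint {
    return this.value;
  }
  getNextKey(): bigint {
    return this.nextValue;
  }
  getNextIndex(): bigint {
    return this.nextIndex;
  }

  toHashInputs(): Buffer[] {
    // Same ordering the removed code hashed: value, nextIndex, nextValue.
    return [this.value, this.nextIndex, this.nextValue].map(v => toBufferBE(v, 32));
  }

  toBuffer(): Buffer {
    return Buffer.concat(this.toHashInputs());
  }

  static fromBuffer(buf: Buffer): SimpleLeafPreimage {
    return new SimpleLeafPreimage(
      toBigIntBE(buf.subarray(0, 32)), // value
      toBigIntBE(buf.subarray(64, 96)), // nextValue
      toBigIntBE(buf.subarray(32, 64)), // nextIndex
    );
  }
}
```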
const verifyCommittedState = async ( @@ -57,7 +64,7 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 0 0 0 0 0 0 0 0. */ - const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeaf(0, 0, 0)); + const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 0, 0)); const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); @@ -91,8 +98,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 1, 30)); - let index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 0, 0)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 1, 30)); + let index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, level1ZeroHash); root = pedersen.hash(e20, level2ZeroHash); @@ -118,8 +125,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 2, 10)); - let index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 1, 30)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 2, 10)); + let index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 1, 30)); e10 = pedersen.hash(index0Hash, index1Hash); let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); e20 = pedersen.hash(e10, e11); @@ -151,8 +158,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 10 0 20 30 0 0 0 0. */ e10 = pedersen.hash(index0Hash, index1Hash); - index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 3, 20)); - const index3Hash = pedersen.hashInputs(createIndexedTreeLeaf(20, 1, 30)); + index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 3, 20)); + const index3Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(20, 1, 30)); e11 = pedersen.hash(index2Hash, index3Hash); e20 = pedersen.hash(e10, e11); root = pedersen.hash(e20, level2ZeroHash); @@ -182,8 +189,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 4 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. */ - index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 4, 50)); - const index4Hash = pedersen.hashInputs(createIndexedTreeLeaf(50, 0, 0)); + index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 4, 50)); + const index4Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(50, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, e11); const e12 = pedersen.hash(index4Hash, INITIAL_LEAF); @@ -255,7 +262,7 @@ describe('StandardIndexedTreeSpecific', () => { */ const INITIAL_LEAF = toBufferBE(0n, 32); - const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeaf(0, 0, 0)); + const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 0, 0)); const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; @@ -289,8 +296,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. 
*/ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 1, 30)); - let index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 0, 0)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 1, 30)); + let index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, level1ZeroHash); root = pedersen.hash(e20, level2ZeroHash); @@ -315,8 +322,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 2, 10)); - let index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 1, 30)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 2, 10)); + let index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 1, 30)); e10 = pedersen.hash(index0Hash, index1Hash); let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); e20 = pedersen.hash(e10, e11); @@ -348,8 +355,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 10 0 20 30 0 0 0 0. */ e10 = pedersen.hash(index0Hash, index1Hash); - index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 3, 20)); - const index3Hash = pedersen.hashInputs(createIndexedTreeLeaf(20, 1, 30)); + index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 3, 20)); + const index3Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(20, 1, 30)); e11 = pedersen.hash(index2Hash, index3Hash); e20 = pedersen.hash(e10, e11); root = pedersen.hash(e20, level2ZeroHash); @@ -387,8 +394,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 6 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. */ - index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 6, 50)); - const index6Hash = pedersen.hashInputs(createIndexedTreeLeaf(50, 0, 0)); + index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 6, 50)); + const index6Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(50, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, e11); const e13 = pedersen.hash(index6Hash, INITIAL_LEAF); @@ -469,4 +476,20 @@ describe('StandardIndexedTreeSpecific', () => { const actualRoot = insertTree.getRoot(true); expect(actualRoot).toEqual(expectedRoot); }); + + it('should be able to find indexes of leaves', async () => { + const db = levelup(createMemDown()); + const tree = await createDb(db, pedersen, 'test', 3); + const values = [Buffer.alloc(32, 1), Buffer.alloc(32, 2)]; + + await tree.appendLeaves([values[0]]); + + expect(await tree.findLeafIndex(values[0], true)).toBeDefined(); + expect(await tree.findLeafIndex(values[0], false)).toBe(undefined); + expect(await tree.findLeafIndex(values[1], true)).toBe(undefined); + + await tree.commit(); + + expect(await tree.findLeafIndex(values[0], false)).toBeDefined(); + }); }); diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts index 49a90e611f1..990f4e6ef5f 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts @@ -1,6 +1,3 @@ -import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; -import { LeafData } from '@aztec/types'; - import { StandardIndexedTree } from '../../index.js'; /** @@ -27,10 +24,10 @@ export class 
StandardIndexedTreeWithAppend extends StandardIndexedTree { * @returns Empty promise. */ private async appendLeaf(leaf: Buffer): Promise { - const newValue = toBigIntBE(leaf); + const newLeaf = this.leafFactory.fromBuffer(leaf); // Special case when appending zero - if (newValue === 0n) { + if (newLeaf.getKey() === 0n) { const newSize = (this.cachedSize ?? this.size) + 1n; if (newSize - 1n > this.maxIndex) { throw Error(`Can't append beyond max index. Max index: ${this.maxIndex}`); @@ -39,27 +36,31 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { return; } - const indexOfPrevious = this.findIndexOfPreviousValue(newValue, true); - const previousLeafCopy = this.getLatestLeafDataCopy(indexOfPrevious.index, true); - - if (previousLeafCopy === undefined) { + const lowLeafIndex = await this.findIndexOfPreviousKey(newLeaf.getKey(), true); + if (lowLeafIndex === undefined) { throw new Error(`Previous leaf not found!`); } - const newLeaf = { - value: newValue, - nextIndex: previousLeafCopy.nextIndex, - nextValue: previousLeafCopy.nextValue, - } as LeafData; - if (indexOfPrevious.alreadyPresent) { + const lowLeafPreimage = (await this.getLatestLeafPreimageCopy(lowLeafIndex.index, true))!; + + const newLeafPreimage = this.leafPreimageFactory.fromLeaf( + newLeaf, + lowLeafPreimage.getNextKey(), + lowLeafPreimage.getNextIndex(), + ); + + if (lowLeafIndex.alreadyPresent) { return; } // insert a new leaf at the highest index and update the values of our previous leaf copy const currentSize = this.getNumLeaves(true); - previousLeafCopy.nextIndex = BigInt(currentSize); - previousLeafCopy.nextValue = newLeaf.value; - this.cachedLeaves[Number(currentSize)] = newLeaf; - this.cachedLeaves[Number(indexOfPrevious.index)] = previousLeafCopy; - await this.updateLeaf(previousLeafCopy, BigInt(indexOfPrevious.index)); - await this.updateLeaf(newLeaf, this.getNumLeaves(true)); + const newLowLeafPreimage = this.leafPreimageFactory.fromLeaf( + lowLeafPreimage.asLeaf(), + newLeaf.getKey(), + BigInt(currentSize), + ); + this.cachedLeafPreimages[Number(currentSize)] = newLeafPreimage; + this.cachedLeafPreimages[Number(lowLeafIndex.index)] = newLowLeafPreimage; + await this.updateLeaf(newLowLeafPreimage, BigInt(lowLeafIndex.index)); + await this.updateLeaf(newLeafPreimage, this.getNumLeaves(true)); } } diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts index ee3191f42ff..b211017d851 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts @@ -69,4 +69,20 @@ describe('StandardTree_batchAppend', () => { expect(tree.getRoot(true)).toEqual(root); }); + + it('should be able to find indexes of leaves', async () => { + const db = levelup(createMemDown()); + const tree = await createDb(db, pedersen, 'test', 3); + const values = [Buffer.alloc(32, 1), Buffer.alloc(32, 2)]; + + await tree.appendLeaves([values[0]]); + + expect(await tree.findLeafIndex(values[0], true)).toBeDefined(); + expect(await tree.findLeafIndex(values[0], false)).toBe(undefined); + expect(await tree.findLeafIndex(values[1], true)).toBe(undefined); + + await tree.commit(); + + expect(await tree.findLeafIndex(values[0], false)).toBeDefined(); + }); }); diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts index 0b92572a4b8..55b4f532469 100644 --- 
a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts @@ -24,4 +24,14 @@ export class StandardTree extends TreeBase implements AppendOnlyTree { public getSnapshot(block: number): Promise { return this.#snapshotBuilder.getSnapshot(block); } + + public async findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise { + for (let i = 0n; i < this.getNumLeaves(includeUncommitted); i++) { + const currentValue = await this.getLeafValue(i, includeUncommitted); + if (currentValue && currentValue.equals(value)) { + return i; + } + } + return undefined; + } } diff --git a/yarn-project/merkle-tree/src/tree_base.ts b/yarn-project/merkle-tree/src/tree_base.ts index c57a0499171..291ac258082 100644 --- a/yarn-project/merkle-tree/src/tree_base.ts +++ b/yarn-project/merkle-tree/src/tree_base.ts @@ -241,13 +241,6 @@ export abstract class TreeBase implements MerkleTree { await this.writeMeta(); } - /** - * Initializes the tree from the database. - */ - public async initFromDb(): Promise { - // Implemented only by Indexed Tree to populate the leaf cache. - } - /** * Writes meta data to the provided batch. * @param batch - The batch to which to write the meta data. @@ -307,4 +300,12 @@ export abstract class TreeBase implements MerkleTree { } this.cachedSize = numLeaves + BigInt(leaves.length); } + + /** + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param value - The leaf value to look for. + * @param includeUncommitted - Indicates whether to include uncommitted data. + * @returns The index of the first leaf found with a given value (undefined if not found). + */ + abstract findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise; } diff --git a/yarn-project/merkle-tree/tsconfig.json b/yarn-project/merkle-tree/tsconfig.json index 831130c7c84..35f81f8b801 100644 --- a/yarn-project/merkle-tree/tsconfig.json +++ b/yarn-project/merkle-tree/tsconfig.json @@ -11,6 +11,9 @@ }, { "path": "../types" + }, + { + "path": "../circuits.js" } ], "include": ["src"] diff --git a/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap b/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap index 1c00e7c76f5..b9e79a46077 100644 --- a/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap +++ b/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap @@ -47,7 +47,7 @@ exports[`noir-compiler using nargo generates Aztec.nr external interface 1`] = ` use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; @@ -240,7 +240,7 @@ exports[`noir-compiler using wasm binary generates Aztec.nr external interface 1 use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; diff --git a/yarn-project/noir-compiler/src/cli.ts b/yarn-project/noir-compiler/src/cli.ts index 837975c8075..7416abed4cc 100644 --- a/yarn-project/noir-compiler/src/cli.ts +++ b/yarn-project/noir-compiler/src/cli.ts @@ -3,18 +3,14 @@ import { createConsoleLogger } from '@aztec/foundation/log'; import { Command } from 'commander'; -import { compileNoir } from './cli/compileNoir.js'; -import { generateNoirInterface } from './cli/noir-interface.js'; -import { generateTypescriptInterface } from './cli/typescript.js'; +import { 
addNoirCompilerCommanderActions } from './cli/add_noir_compiler_commander_actions.js'; const program = new Command(); const log = createConsoleLogger('aztec:compiler-cli'); const main = async () => { program.name('aztec-compile'); - compileNoir(program, 'compile', log); - generateTypescriptInterface(program, 'typescript', log); - generateNoirInterface(program, 'interface', log); + addNoirCompilerCommanderActions(program, log); await program.parseAsync(process.argv); }; diff --git a/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts new file mode 100644 index 00000000000..7dff5ecb7b1 --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts @@ -0,0 +1,67 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { Command } from 'commander'; + +/** + * CLI options for configuring behavior + */ +interface Options { + // eslint-disable-next-line jsdoc/require-jsdoc + outdir: string; + // eslint-disable-next-line jsdoc/require-jsdoc + typescript: string | undefined; + // eslint-disable-next-line jsdoc/require-jsdoc + interface: string | undefined; + // eslint-disable-next-line jsdoc/require-jsdoc + compiler: string | undefined; +} + +/** + * Registers the compile, generate-typescript, and generate-noir-interface commands on the given commander program. + */ +export function addNoirCompilerCommanderActions(program: Command, log: LogFn = () => {}) { + program + .command('compile') + .argument('<project-path>', 'Path to the bin or Aztec.nr project to compile') + .option('-o, --outdir <path>', 'Output folder for the binary artifacts, relative to the project path', 'target') + .option('-ts, --typescript <path>', 'Optional output folder for generating typescript wrappers', undefined) + .option('-i, --interface <path>', 'Optional output folder for generating an Aztec.nr contract interface', undefined) + .option('-c --compiler <compiler>', 'Which compiler to use. Either nargo or wasm.', 'wasm') + .description('Compiles the Noir Source in the target project') + + .action(async (projectPath: string, options: Options) => { + const { compileNoir } = await import('./compile_noir.js'); + await compileNoir(projectPath, options, log); + }); + + program + .command('generate-typescript') + .argument('<project-path>', 'Path to the noir project') + .option('--artifacts <path>', 'Folder containing the compiled artifacts, relative to the project path', 'target') + .option( + '-o, --outdir <path>', + 'Output folder for the generated typescript wrappers, relative to the project path', + 'interfaces', + ) + .description('Generates TypeScript interfaces from the artifacts in the given project') + + .action(async (projectPath: string, options) => { + const { generateTypescriptInterface } = await import('./generate_typescript_interface.js'); + generateTypescriptInterface(projectPath, options, log); + }); + + return program + .command('generate-noir-interface') + .argument('<project-path>', 'Path to the noir project') + .option('--artifacts <path>', 'Folder containing the compiled artifacts, relative to the project path', 'target') + .option( + '-o, --outdir <path>', + 'Output folder for the generated noir interfaces, relative to the project path', + 'interfaces', + ) + .description('Generates Noir interfaces from the artifacts in the given project') + .action(async (projectPath: string, options) => { + const { generateNoirInterface } = await import('./generate_noir_interface.js'); + generateNoirInterface(projectPath, options, log); + }); +} diff --git a/yarn-project/noir-compiler/src/cli/compileNoir.ts b/yarn-project/noir-compiler/src/cli/compile_noir.ts similarity index 75% rename from yarn-project/noir-compiler/src/cli/compileNoir.ts rename to yarn-project/noir-compiler/src/cli/compile_noir.ts index 7e00fed39fa..f87055464a9 100644 --- a/yarn-project/noir-compiler/src/cli/compileNoir.ts +++ b/yarn-project/noir-compiler/src/cli/compile_noir.ts @@ -1,7 +1,6 @@ import { ContractArtifact } from '@aztec/foundation/abi'; import { LogFn } from '@aztec/foundation/log'; -import { Command } from 'commander'; import { mkdirSync, writeFileSync } from 'fs'; import { mkdirpSync } from 'fs-extra'; import path, { resolve } from 'path'; @@ -34,32 +33,21 @@ interface Options { * @param log - Optional logging function. * @returns The program with the command registered. */ -export function compileNoir(program: Command, name = 'compile', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('<project-path>', 'Path to the bin or Aztec.nr project to compile') - .option('-o, --outdir <path>', 'Output folder for the binary artifacts, relative to the project path', 'target') - .option('-ts, --typescript <path>', 'Optional output folder for generating typescript wrappers', undefined) - .option('-i, --interface <path>', 'Optional output folder for generating an Aztec.nr contract interface', undefined) - .option('-c --compiler <compiler>', 'Which compiler to use. Either nargo or wasm.
Defaults to nargo', 'wasm') - .description('Compiles the Noir Source in the target project') - - .action(async (projectPath: string, options: Options) => { - const { compiler } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - if (compiler !== 'nargo' && compiler !== 'wasm') { - throw new Error(`Invalid compiler: ${compiler}`); - } +export async function compileNoir(projectPath: string, options: Options, log: LogFn = () => {}) { + const { compiler } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + if (compiler !== 'nargo' && compiler !== 'wasm') { + throw new Error(`Invalid compiler: ${compiler}`); + } - const compile = compiler === 'wasm' ? compileUsingNoirWasm : compileUsingNargo; - log(`Compiling ${projectPath} with ${compiler} backend...`); - const results = await compile(projectPath, { log }); - for (const result of results) { - generateOutput(projectPath, result, options, log); - } - }); + const compile = compiler === 'wasm' ? compileUsingNoirWasm : compileUsingNargo; + log(`Compiling ${projectPath} with ${compiler} backend...`); + const results = await compile(projectPath, { log }); + for (const result of results) { + generateOutput(projectPath, result, options, log); + } } /** diff --git a/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts b/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts new file mode 100644 index 00000000000..4a13a1c756f --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts @@ -0,0 +1,48 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; +import { mkdirpSync } from 'fs-extra'; +import path, { resolve } from 'path'; + +import { generateNoirContractInterface } from '../index.js'; +import { isContractArtifact } from '../utils.js'; + +/** + * Generates Noir interface files for the contract artifacts found in the given project. + */ +export function generateNoirInterface( + projectPath: string, + options: { + // eslint-disable-next-line jsdoc/require-jsdoc + outdir: string; + // eslint-disable-next-line jsdoc/require-jsdoc + artifacts: string; + }, + log: LogFn, +) { + const { outdir, artifacts } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + const currentDir = process.cwd(); + + const artifactsDir = resolve(projectPath, artifacts); + for (const artifactsDirItem of readdirSync(artifactsDir)) { + const artifactPath = resolve(artifactsDir, artifactsDirItem); + if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { + const contract = JSON.parse(readFileSync(artifactPath).toString()); + if (!isContractArtifact(contract)) { + continue; + } + const interfacePath = resolve(projectPath, outdir, `${contract.name}_interface.nr`); + log(`Writing ${contract.name} Noir external interface to ${path.relative(currentDir, interfacePath)}`); + try { + const noirInterface = generateNoirContractInterface(contract); + mkdirpSync(path.dirname(interfacePath)); + writeFileSync(interfacePath, noirInterface); + } catch (err) { + log(`Error generating interface for ${artifactPath}: ${err}`); + } + } + } +} diff --git a/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts b/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts new file mode 100644 index 00000000000..d004706c257 --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts @@ -0,0 +1,57 @@ +import { LogFn } from
'@aztec/foundation/log'; + +import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; +import { mkdirpSync } from 'fs-extra'; +import path, { resolve } from 'path'; + +import { generateTypescriptContractInterface } from '../index.js'; +import { isContractArtifact } from '../utils.js'; + +/** + * Generates TypeScript interfaces for the contract artifacts found in the given project. + * @param projectPath - Path to the noir project. + * @param options - Artifacts and output folders. + * @param log - Logging function. + */ +export function generateTypescriptInterface( + projectPath: string, + options: { + /* eslint-disable jsdoc/require-jsdoc */ + outdir: string; + /* eslint-disable jsdoc/require-jsdoc */ + artifacts: string; + }, + log: LogFn, +) { + const { outdir, artifacts } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + const currentDir = process.cwd(); + + const artifactsDir = resolve(projectPath, artifacts); + for (const artifactsDirItem of readdirSync(artifactsDir)) { + const artifactPath = resolve(artifactsDir, artifactsDirItem); + if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { + const contract = JSON.parse(readFileSync(artifactPath).toString()); + if (!isContractArtifact(contract)) { + continue; + } + const tsPath = resolve(projectPath, outdir, `${contract.name}.ts`); + log(`Writing ${contract.name} typescript interface to ${path.relative(currentDir, tsPath)}`); + let relativeArtifactPath = path.relative(path.dirname(tsPath), artifactPath); + if (relativeArtifactPath === `${contract.name}.json`) { + // relative path edge case, prepending ./ for local import - the above logic just does + // `${contract.name}.json`, which is not a valid import for a file in the same directory + relativeArtifactPath = `./${contract.name}.json`; + } + try { + const tsWrapper = generateTypescriptContractInterface(contract, relativeArtifactPath); + mkdirpSync(path.dirname(tsPath)); + writeFileSync(tsPath, tsWrapper); + } catch (err) { + log(`Error generating interface for ${artifactPath}: ${err}`); + } + } + } +} diff --git a/yarn-project/noir-compiler/src/cli/index.ts b/yarn-project/noir-compiler/src/cli/index.ts index 0687e01706f..df1ae3d0880 100644 --- a/yarn-project/noir-compiler/src/cli/index.ts +++ b/yarn-project/noir-compiler/src/cli/index.ts @@ -1,3 +1 @@ -export { compileNoir } from './compileNoir.js'; -export { generateNoirInterface } from './noir-interface.js'; -export { generateTypescriptInterface } from './typescript.js'; +export { addNoirCompilerCommanderActions } from './add_noir_compiler_commander_actions.js'; diff --git a/yarn-project/noir-compiler/src/cli/noir-interface.ts b/yarn-project/noir-compiler/src/cli/noir-interface.ts deleted file mode 100644 index 54db5b7b86b..00000000000 --- a/yarn-project/noir-compiler/src/cli/noir-interface.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { LogFn } from '@aztec/foundation/log'; - -import { Command } from 'commander'; -import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; -import { mkdirpSync } from 'fs-extra'; -import path, { resolve } from 'path'; - -import { generateNoirContractInterface } from '../index.js'; -import { isContractArtifact } from '../utils.js'; - -/** - * Registers a 'interface' command on the given commander program that generates a Noir interface out of an ABI. - * @param program - Commander program. - * @param log - Optional logging function.
- * @returns The program with the command registered. - */ -export function generateNoirInterface(program: Command, name = 'interface', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the noir project') - .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') - .option( - '-o, --outdir ', - 'Output folder for the generated noir interfaces, relative to the project path', - 'interfaces', - ) - .description('Generates Noir interfaces from the artifacts in the given project') - - .action( - ( - projectPath: string, - /* eslint-disable jsdoc/require-jsdoc */ - options: { - outdir: string; - artifacts: string; - }, - /* eslint-enable jsdoc/require-jsdoc */ - ) => { - const { outdir, artifacts } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - const currentDir = process.cwd(); - - const artifactsDir = resolve(projectPath, artifacts); - for (const artifactsDirItem of readdirSync(artifactsDir)) { - const artifactPath = resolve(artifactsDir, artifactsDirItem); - if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { - const contract = JSON.parse(readFileSync(artifactPath).toString()); - if (!isContractArtifact(contract)) { - continue; - } - const interfacePath = resolve(projectPath, outdir, `${contract.name}_interface.nr`); - log(`Writing ${contract.name} Noir external interface to ${path.relative(currentDir, interfacePath)}`); - try { - const noirInterface = generateNoirContractInterface(contract); - mkdirpSync(path.dirname(interfacePath)); - writeFileSync(interfacePath, noirInterface); - } catch (err) { - log(`Error generating interface for ${artifactPath}: ${err}`); - } - } - } - }, - ); -} diff --git a/yarn-project/noir-compiler/src/cli/typescript.ts b/yarn-project/noir-compiler/src/cli/typescript.ts deleted file mode 100644 index cf107cc0338..00000000000 --- a/yarn-project/noir-compiler/src/cli/typescript.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { LogFn } from '@aztec/foundation/log'; - -import { Command } from 'commander'; -import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; -import { mkdirpSync } from 'fs-extra'; -import path, { resolve } from 'path'; - -import { generateTypescriptContractInterface } from '../index.js'; -import { isContractArtifact } from '../utils.js'; - -/** - * Registers a 'typescript' command on the given commander program that generates typescript interface out of an ABI. - * @param program - Commander program. - * @param log - Optional logging function. - * @returns The program with the command registered. 
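Note the pattern in the new commander wiring that replaces these deleted modules: each action defers loading its implementation with a dynamic `import()` inside the callback, so registering commands or printing help never pulls in the compiler toolchain. A stripped-down sketch of that pattern; the module path and option defaults here are illustrative assumptions, not the exact production values:

```typescript
import { Command } from 'commander';

const program = new Command();

program
  .command('compile')
  .argument('<project-path>', 'Path to the project to compile')
  .action(async (projectPath: string) => {
    // Deferred load: the heavy compiler module is only imported when the
    // command actually runs, keeping startup and `--help` fast.
    const { compileNoir } = await import('./compile_noir.js');
    await compileNoir(
      projectPath,
      { outdir: 'target', typescript: undefined, interface: undefined, compiler: 'wasm' },
      console.log,
    );
  });

await program.parseAsync(process.argv);
```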
- */ -export function generateTypescriptInterface(program: Command, name = 'typescript', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('<project-path>', 'Path to the noir project') - .option('--artifacts <path>', 'Folder containing the compiled artifacts, relative to the project path', 'target') - .option( - '-o, --outdir <path>', - 'Output folder for the generated typescript wrappers, relative to the project path', - 'types', - ) - .description('Generates typescript interfaces from the artifacts in the given project') - - .action( - ( - projectPath: string, - /* eslint-disable jsdoc/require-jsdoc */ - options: { - outdir: string; - artifacts: string; - }, - /* eslint-enable jsdoc/require-jsdoc */ - ) => { - const { outdir, artifacts } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - const currentDir = process.cwd(); - - const artifactsDir = resolve(projectPath, artifacts); - for (const artifactsDirItem of readdirSync(artifactsDir)) { - const artifactPath = resolve(artifactsDir, artifactsDirItem); - if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { - const contract = JSON.parse(readFileSync(artifactPath).toString()); - if (!isContractArtifact(contract)) { - continue; - } - const tsPath = resolve(projectPath, outdir, `${contract.name}.ts`); - log(`Writing ${contract.name} typescript interface to ${path.relative(currentDir, tsPath)}`); - let relativeArtifactPath = path.relative(path.dirname(tsPath), artifactPath); - if (relativeArtifactPath === `${contract.name}.json`) { - // relative path edge case, prepending ./ for local import - the above logic just does - // `${contract.name}.json`, which is not a valid import for a file in the same directory - relativeArtifactPath = `./${contract.name}.json`; - } - try { - const tsWrapper = generateTypescriptContractInterface(contract, relativeArtifactPath); - mkdirpSync(path.dirname(tsPath)); - writeFileSync(tsPath, tsWrapper); - } catch (err) { - log(`Error generating interface for ${artifactPath}: ${err}`); - } - } - } - }, - ); -} diff --git a/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts b/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts index 356c891564d..8c9380b77ef 100644 --- a/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts +++ b/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts @@ -71,8 +71,11 @@ export class GithubDependencyResolver implements NoirDependencyResolver { async #extractZip(dependency: NoirGitDependencyConfig, archivePath: string): Promise<string> { const gitUrl = new URL(dependency.git); + // extract the archive to this location const extractLocation = join('libs', safeFilename(gitUrl.pathname + '@' + (dependency.tag ?? 'HEAD'))); - const tmpExtractLocation = extractLocation + '.tmp'; + + // where we expect to find this package after extraction + // it might already exist if the archive got unzipped previously const packagePath = join(extractLocation, dependency.directory ??
''); if (this.#fm.hasFileSync(packagePath)) { @@ -82,24 +85,21 @@ export class GithubDependencyResolver implements NoirDependencyResolver { const { entries } = await unzip(this.#fm.readFileSync(archivePath)); + // extract to a temporary directory, then move it to the final location + // TODO empty the temp directory first + const tmpExtractLocation = extractLocation + '.tmp'; for (const entry of Object.values(entries)) { if (entry.isDirectory) { continue; } + // remove the first path segment, because it'll be the archive name const name = stripSegments(entry.name, 1); - if (dependency.directory && !name.startsWith(dependency.directory)) { - continue; - } const path = join(tmpExtractLocation, name); await this.#fm.writeFile(path, (await entry.blob()).stream()); } - if (dependency.directory) { - this.#fm.moveFileSync(join(tmpExtractLocation, dependency.directory), packagePath); - } else { - this.#fm.moveFileSync(tmpExtractLocation, packagePath); - } + this.#fm.moveFileSync(tmpExtractLocation, extractLocation); return packagePath; } diff --git a/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts b/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts index 36e2e24893b..720b7fe942f 100644 --- a/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts +++ b/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts @@ -1,6 +1,6 @@ import { NoirDependencyConfig } from '@aztec/foundation/noir'; -import { resolve } from 'path'; +import { isAbsolute, join } from 'path'; import { FileManager } from '../file-manager/file-manager.js'; import { NoirPackage } from '../package.js'; @@ -16,12 +16,14 @@ export class LocalDependencyResolver implements NoirDependencyResolver { this.#fm = fm; } - resolveDependency(pkg: NoirPackage, config: NoirDependencyConfig): Promise { + resolveDependency(parent: NoirPackage, config: NoirDependencyConfig): Promise { if ('path' in config) { + const parentPath = parent.getPackagePath(); + const dependencyPath = isAbsolute(config.path) ? 
config.path : join(parentPath, config.path); return Promise.resolve({ // unknown version, Nargo.toml doesn't have a version field version: undefined, - package: NoirPackage.open(resolve(pkg.getPackagePath(), config.path), this.#fm), + package: NoirPackage.open(dependencyPath, this.#fm), }); } else { return Promise.resolve(null); diff --git a/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts b/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts index ab91b78d8d6..ecc5ffb771d 100644 --- a/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts +++ b/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts @@ -167,7 +167,7 @@ ${callStatement} function generateStaticImports() { return `use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH;`; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH;`; } /** diff --git a/yarn-project/noir-contracts/scripts/catch.sh b/yarn-project/noir-contracts/scripts/catch.sh index 8a0a894b93f..87b485eb3f3 100644 --- a/yarn-project/noir-contracts/scripts/catch.sh +++ b/yarn-project/noir-contracts/scripts/catch.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Handler for SIGCHLD, cleanup if child exit with error, used by nargo_test.sh and compile.sh handle_sigchld() { diff --git a/yarn-project/noir-contracts/scripts/compile.sh b/yarn-project/noir-contracts/scripts/compile.sh index fe2096f4cf4..055485220e0 100755 --- a/yarn-project/noir-contracts/scripts/compile.sh +++ b/yarn-project/noir-contracts/scripts/compile.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -euo pipefail; @@ -19,4 +19,4 @@ build() { export -f build # run 4 builds at a time -echo "$@" | xargs -n 1 -P 4 bash -c 'build "$0"' +echo "$@" | xargs -n 1 -P $(nproc) bash -c 'build "$0"' diff --git a/yarn-project/noir-contracts/scripts/compile_all.sh b/yarn-project/noir-contracts/scripts/compile_all.sh index 32de820a630..7445dbbc854 100755 --- a/yarn-project/noir-contracts/scripts/compile_all.sh +++ b/yarn-project/noir-contracts/scripts/compile_all.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash echo "Compiling all contracts" ./scripts/compile.sh $(./scripts/get_all_contracts.sh) diff --git a/yarn-project/noir-contracts/scripts/get_all_contracts.sh b/yarn-project/noir-contracts/scripts/get_all_contracts.sh index caaf81c1a13..1311a4862a4 100755 --- a/yarn-project/noir-contracts/scripts/get_all_contracts.sh +++ b/yarn-project/noir-contracts/scripts/get_all_contracts.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash # Utility to get the names of all contracts echo $(ls -d src/contracts/*_contract/Nargo.toml | sed -r "s/src\\/contracts\\/(.+)_contract\\/Nargo.toml/\\1/") \ No newline at end of file diff --git a/yarn-project/noir-contracts/scripts/get_all_libraries.sh b/yarn-project/noir-contracts/scripts/get_all_libraries.sh index f1913a46caf..8fbe7bb1b19 100755 --- a/yarn-project/noir-contracts/scripts/get_all_libraries.sh +++ b/yarn-project/noir-contracts/scripts/get_all_libraries.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash # Utility to get the names of all noir libraries located in ../aztec-nr echo $(ls -d ../aztec-nr/*/Nargo.toml | sed -r "s/..\\/aztec-nr\\/(.+)\\/Nargo.toml/\\1/") \ No newline at end of file diff --git a/yarn-project/noir-contracts/scripts/install_noir.sh b/yarn-project/noir-contracts/scripts/install_noir.sh index 325e9a56620..b1105d6ad46 100755 --- a/yarn-project/noir-contracts/scripts/install_noir.sh +++ 
b/yarn-project/noir-contracts/scripts/install_noir.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to install noirup and the latest aztec nargo set -eu diff --git a/yarn-project/noir-contracts/scripts/install_noirup.sh b/yarn-project/noir-contracts/scripts/install_noirup.sh index 11ba9b15d31..025237333cd 100755 --- a/yarn-project/noir-contracts/scripts/install_noirup.sh +++ b/yarn-project/noir-contracts/scripts/install_noirup.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Script to install noirup and the latest nargo set -eu diff --git a/yarn-project/noir-contracts/scripts/nargo_check.sh b/yarn-project/noir-contracts/scripts/nargo_check.sh index 10d9d14c2c1..45209d4ee43 100644 --- a/yarn-project/noir-contracts/scripts/nargo_check.sh +++ b/yarn-project/noir-contracts/scripts/nargo_check.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Check nargo version matches the expected one nargo_check() { diff --git a/yarn-project/noir-contracts/scripts/nargo_test.sh b/yarn-project/noir-contracts/scripts/nargo_test.sh index 8468c19f492..0666714388d 100755 --- a/yarn-project/noir-contracts/scripts/nargo_test.sh +++ b/yarn-project/noir-contracts/scripts/nargo_test.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Tests noir contracts, if multiple are provided, then they are testing in parallel, bubbling any testing errors # diff --git a/yarn-project/noir-contracts/scripts/nargo_test_ci.sh b/yarn-project/noir-contracts/scripts/nargo_test_ci.sh index 5a4458d4da5..d835f972324 100755 --- a/yarn-project/noir-contracts/scripts/nargo_test_ci.sh +++ b/yarn-project/noir-contracts/scripts/nargo_test_ci.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Runs tests scripts for all contracts, then for all libraries. ./scripts/nargo_test.sh CONTRACT $(./scripts/get_all_contracts.sh) diff --git a/yarn-project/noir-contracts/scripts/types.sh b/yarn-project/noir-contracts/scripts/types.sh index ebbc24bdc5f..3e2410e4004 100755 --- a/yarn-project/noir-contracts/scripts/types.sh +++ b/yarn-project/noir-contracts/scripts/types.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Example: # - this script will automatically be run when running `yarn noir:build` diff --git a/yarn-project/noir-contracts/scripts/types_all.sh b/yarn-project/noir-contracts/scripts/types_all.sh index 5cbb3ce9d91..38081dc6efe 100755 --- a/yarn-project/noir-contracts/scripts/types_all.sh +++ b/yarn-project/noir-contracts/scripts/types_all.sh @@ -1,3 +1,3 @@ -#!/bin/bash +#!/usr/bin/env bash # Run the types script for all files ./scripts/types.sh $(./scripts/get_all_contracts.sh) diff --git a/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml index 03bfe065166..32e7ab21040 100644 --- a/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } value_note = { path = "../../../../aztec-nr/value-note"} +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } diff --git a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr index f5f5636132b..560dc4fb8e9 100644 --- a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr +++ b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr 
@@ -1,6 +1,6 @@ +use dep::protocol_types::constants::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; use dep::aztec::{ context::{PrivateContext, PublicContext, Context}, - constants_gen::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}, note::{ note_getter_options::NoteGetterOptions, note_viewer_options::NoteViewerOptions, diff --git a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr index 2792859e9b4..9edea67a00c 100644 --- a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr @@ -2,6 +2,7 @@ mod cards; mod game; contract CardGame { + use dep::protocol_types::constants::MAX_NOTES_PER_PAGE; use dep::aztec::{ context::Context, hash::pedersen_hash, @@ -22,7 +23,6 @@ contract CardGame { use dep::aztec::{ abi, - constants_gen::{MAX_NOTES_PER_PAGE}, abi::{ Hasher, PrivateContextInputs, }, diff --git a/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml index 4fa0ca7d4e2..5eccd21c042 100644 --- a/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "contract" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } \ No newline at end of file +aztec = { path = "../../../../aztec-nr/aztec" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr index 474fb20c6fc..9e3234812f9 100644 --- a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr +++ b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; +use dep::protocol_types::constants::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; use dep::aztec::note::{ note_getter_options::NoteGetterOptions, note_viewer_options::NoteViewerOptions, }; diff --git a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr index c225942b9fb..6ac7442ea2f 100644 --- a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr +++ b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr @@ -1,5 +1,5 @@ use crate::types::card_note::{CardNote, CARD_NOTE_LEN}; -use dep::aztec::constants_gen::MAX_READ_REQUESTS_PER_CALL; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::note::note_getter_options::{NoteGetterOptions, Sort, SortOrder}; use dep::std::option::Option; diff --git a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml index 21ef226c194..2e29971c5dc 100644 --- a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "contract" [dependencies] -aztec = { path = 
"../../../../aztec-nr/aztec" } \ No newline at end of file +aztec = { path = "../../../../aztec-nr/aztec" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr index 8e1de0cf189..f428fc88426 100644 --- a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr @@ -1,5 +1,6 @@ contract EasyPrivateVoting { // docs:start:imports + use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; use dep::aztec::{ context::{PrivateContext, Context}, oracle::get_secret_key::get_secret_key, // used to compute nullifier @@ -11,7 +12,6 @@ contract EasyPrivateVoting { field_serialization::{ FieldSerializationMethods, FIELD_SERIALIZED_LEN}, }, types::address::{AztecAddress}, - constants_gen::EMPTY_NULLIFIED_COMMITMENT, }; // docs:end:imports // docs:start:storage_struct diff --git a/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml index 7f5a66f3819..9ef152ea1f6 100644 --- a/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "contract" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } \ No newline at end of file +aztec = { path = "../../../../aztec-nr/aztec" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml index b11305196cd..3f884034998 100644 --- a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml @@ -6,4 +6,5 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } -value_note = { path = "../../../../aztec-nr/value-note" } \ No newline at end of file +value_note = { path = "../../../../aztec-nr/value-note" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr index 462f95091be..d9235ccd6ee 100644 --- a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr @@ -2,6 +2,11 @@ mod utils; // A demonstration of inclusion and non-inclusion proofs. 
contract InclusionProofs { + use dep::protocol_types::constants::{ + NOTE_HASH_TREE_HEIGHT, + PUBLIC_DATA_TREE_HEIGHT, + GENERATOR_INDEX__PUBLIC_LEAF_INDEX, + }; use dep::std::merkle::compute_merkle_root; use dep::aztec::{ state_vars::{ @@ -20,13 +25,8 @@ contract InclusionProofs { note_header::NoteHeader, utils as note_utils, }, - constants_gen::{ - NOTE_HASH_TREE_HEIGHT, - PUBLIC_DATA_TREE_HEIGHT, - GENERATOR_INDEX__PUBLIC_LEAF_INDEX, - }, + oracle::{ - get_block_header::get_block_header, get_membership_witness::{ get_membership_witness, MembershipWitness, @@ -104,6 +104,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. @@ -147,6 +148,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. @@ -185,9 +187,9 @@ contract InclusionProofs { ); // 5.c) Prove that the low nullifier is pointing "over" the nullifier to prove that the nullifier is not - // included in the nullifier tree + // included in the nullifier tree (or to 0 if the to-be-inserted nullifier is the largest of all) assert( - full_field_greater_than(witness.leaf_data.next_value, nullifier), + full_field_greater_than(witness.leaf_data.next_value, nullifier) | (witness.leaf_data.next_index == 0), "Proving nullifier non-inclusion failed: low_nullifier.next_value > nullifier.value check failed" ); @@ -218,6 +220,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. @@ -250,6 +253,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. 
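The nullifier non-inclusion fix in the InclusionProofs hunk above is subtle: in an indexed merkle tree the leaves form a sorted linked list, and the highest leaf points back to index 0 rather than to a greater value, so requiring `next_value > nullifier` alone would reject valid proofs whenever the queried nullifier is larger than every nullifier already in the tree. A minimal TypeScript sketch of the corrected predicate (`LeafData` and `isNullifierAbsent` are illustrative names, not the repository's API):

```typescript
// Low-leaf non-inclusion check for an indexed merkle tree, assuming the
// merkle membership proof of the low leaf itself is verified separately
// (as the surrounding contract code does via assert_check_membership).
interface LeafData {
  value: bigint; // value stored in the low leaf
  nextValue: bigint; // value of the next leaf in sorted order (0 for the last leaf)
  nextIndex: number; // index of the next leaf (0 marks the end of the list)
}

function isNullifierAbsent(lowLeaf: LeafData, nullifier: bigint): boolean {
  // the low leaf must sit strictly below the queried nullifier...
  const sitsBelow = lowLeaf.value < nullifier;
  // ...and either point "over" it, or be the last leaf in the list,
  // i.e. the nullifier would be the largest of all (the case this fix adds)
  const pointsOver = lowLeaf.nextValue > nullifier || lowLeaf.nextIndex === 0;
  return sitsBelow && pointsOver;
}

// Example: in a tree containing {5, 10}, the low leaf for 12 is the leaf
// holding 10, whose nextIndex is 0, so 12 is correctly proven absent.
```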
diff --git a/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml index 33b61c1b24d..ef3e9d65340 100644 --- a/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml @@ -6,4 +6,5 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } -safe_math = { path = "../../../../aztec-nr/safe-math" } \ No newline at end of file +safe_math = { path = "../../../../aztec-nr/safe-math" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr b/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr index 53385ed3e99..6b2cfa5c629 100644 --- a/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr +++ b/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr @@ -4,7 +4,7 @@ use dep::aztec::context::{ }; use crate::asset::Asset; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; use dep::aztec::selector::compute_selector; struct PriceFeed { diff --git a/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml index 319c15b9ad4..cc97a1ddeee 100644 --- a/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml @@ -8,3 +8,4 @@ type = "contract" aztec = { path = "../../../../aztec-nr/aztec" } field_note = { path = "../../../../aztec-nr/field-note" } token_portal_content_hash_lib = { path = "../token_portal_content_hash_lib" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr b/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr index 1bb62e9f3f6..51bd95002ae 100644 --- a/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr +++ b/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr @@ -2,7 +2,7 @@ use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; struct AStructTestCodeGenStruct { amount: Field, diff --git a/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr index 186b9220cfb..1515b39d8a1 100644 --- a/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr @@ -1,7 +1,7 @@ // A contract used for testing a random hodgepodge of small features from simulator and end-to-end tests. 
contract Test { use dep::std::option::Option; - + use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; // docs:start:unencrypted_import use dep::aztec::log::emit_unencrypted_log; // docs:end:unencrypted_import @@ -24,7 +24,6 @@ contract Test { state_vars::immutable_singleton::ImmutableSingleton, log::emit_unencrypted_log_from_private, types::vec::BoundedVec, - constants_gen::EMPTY_NULLIFIED_COMMITMENT, }; use dep::token_portal_content_hash_lib::{get_mint_private_content_hash, get_mint_public_content_hash}; use dep::field_note::field_note::{FieldNote, FieldNoteMethods, FIELD_NOTE_LEN}; diff --git a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml index 25ad4026709..15cff362a4b 100644 --- a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml @@ -8,4 +8,5 @@ type = "contract" aztec = { path = "../../../../aztec-nr/aztec" } safe_math = { path = "../../../../aztec-nr/safe-math" } field_note = { path = "../../../../aztec-nr/field-note" } -authwit = { path = "../../../../aztec-nr/authwit" } \ No newline at end of file +authwit = { path = "../../../../aztec-nr/authwit" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr index 5c3da5897b6..ae54a03ea76 100644 --- a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr +++ b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr @@ -1,8 +1,8 @@ use dep::std::option::Option; use dep::safe_math::SafeU120; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ context::Context, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, types::address::AztecAddress, }; diff --git a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr index 978676cc22e..55ef7ff36e9 100644 --- a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr +++ b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr @@ -1,3 +1,4 @@ +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ note::{ note_header::NoteHeader, @@ -5,7 +6,6 @@ use dep::aztec::{ utils::compute_note_hash_for_read_or_nullify, }, context::PrivateContext, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, log::emit_encrypted_log, hash::pedersen_hash, diff --git a/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml index 13ddd27395c..4fc1814f588 100644 --- a/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml @@ -7,4 +7,5 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } safe_math = { path = "../../../../aztec-nr/safe-math" } -authwit = { path = "../../../../aztec-nr/authwit" } \ No newline at end of file +authwit = { path = "../../../../aztec-nr/authwit" } +protocol_types = 
{ path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr index 5c3da5897b6..ae54a03ea76 100644 --- a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr +++ b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr @@ -1,8 +1,8 @@ use dep::std::option::Option; use dep::safe_math::SafeU120; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ context::Context, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, types::address::AztecAddress, }; diff --git a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr index cc366cc2277..8c4f1721cf9 100644 --- a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr +++ b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr @@ -1,3 +1,4 @@ +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ note::{ note_header::NoteHeader, @@ -5,7 +6,6 @@ use dep::aztec::{ utils::compute_note_hash_for_read_or_nullify, }, context::PrivateContext, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, log::emit_encrypted_log, hash::pedersen_hash, diff --git a/yarn-project/noir-contracts/src/scripts/compile.sh b/yarn-project/noir-contracts/src/scripts/compile.sh index 551f7a74869..adedfaeba4f 100755 --- a/yarn-project/noir-contracts/src/scripts/compile.sh +++ b/yarn-project/noir-contracts/src/scripts/compile.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Example: diff --git a/yarn-project/noir-protocol-circuits/package.json b/yarn-project/noir-protocol-circuits/package.json index 2b4b8351e53..f5eccb835a5 100644 --- a/yarn-project/noir-protocol-circuits/package.json +++ b/yarn-project/noir-protocol-circuits/package.json @@ -12,7 +12,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "noir:build": "cd src && ../../../noir/target/release/nargo compile && rm -rf ./target/debug_*", + "noir:build": "cd src && ../../../noir/target/release/nargo compile --silence-warnings && rm -rf ./target/debug_*", "noir:types": "yarn ts-node --esm src/scripts/generate_ts_from_abi.ts && yarn formatting:fix", "noir:test": "cd src && ../../../noir/target/release/nargo test", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests" diff --git a/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap b/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap index 4cf926dc96a..850acabc9de 100644 --- a/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap +++ b/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap @@ -104,7 +104,7 @@ exports[`Private kernel Executes private kernel init circuit for a contract depl KernelCircuitPublicInputs { "constants": CombinedConstantData { "blockHeader": BlockHeader { - "blocksTreeRoot": Fr { + "archiveRoot": Fr { "asBigInt": 10561895175368852737061915973188839857007468377789560793687187642867659280638n, "asBuffer": { "data": [ @@ -25720,7 +25720,7 @@ exports[`Private kernel Executes private 
kernel inner for a nested call 1`] = ` KernelCircuitPublicInputs { "constants": CombinedConstantData { "blockHeader": BlockHeader { - "blocksTreeRoot": Fr { + "archiveRoot": Fr { "asBigInt": 4141256197271035428567950264296887925803599654022881395228888440470800002298n, "asBuffer": { "data": [ @@ -51336,7 +51336,7 @@ exports[`Private kernel Executes private kernel ordering after a deployment 1`] KernelCircuitPublicInputsFinal { "constants": CombinedConstantData { "blockHeader": BlockHeader { - "blocksTreeRoot": Fr { + "archiveRoot": Fr { "asBigInt": 10561895175368852737061915973188839857007468377789560793687187642867659280638n, "asBuffer": { "data": [ diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml index 47c3f8af754..4132c7e2b18 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml @@ -5,5 +5,4 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } types = { path = "../types" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr index 8653d993183..c49b4843d22 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr @@ -1,11 +1,3 @@ -use dep::aztec::constants_gen::{ - EMPTY_NULLIFIED_COMMITMENT, - MAX_NEW_NULLIFIERS_PER_CALL, - MAX_NEW_L2_TO_L1_MSGS_PER_CALL, - MAX_NEW_COMMITMENTS_PER_CALL, - MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, - MAX_READ_REQUESTS_PER_CALL, -}; use dep::types::{ abis::{ call_request::CallRequest, @@ -21,6 +13,14 @@ use dep::types::{ }, address::{Address, EthAddress}, contrakt::deployment_data::ContractDeploymentData, + constants::{ + EMPTY_NULLIFIED_COMMITMENT, + MAX_NEW_NULLIFIERS_PER_CALL, + MAX_NEW_L2_TO_L1_MSGS_PER_CALL, + MAX_NEW_COMMITMENTS_PER_CALL, + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, + MAX_READ_REQUESTS_PER_CALL, + }, hash::{ compute_constructor_hash, compute_l2_to_l1_hash, diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr index 88a34b4049c..b20c6b963b0 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr @@ -1,5 +1,4 @@ use crate::common; -use dep::aztec::constants_gen::EMPTY_NULLIFIED_COMMITMENT; use dep::std::unsafe; use dep::types::{ abis::{ @@ -7,6 +6,7 @@ use dep::types::{ private_kernel::private_call_data::PrivateCallData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, KernelCircuitPublicInputsBuilder}, }, + constants::EMPTY_NULLIFIED_COMMITMENT, mocked::{Proof, verify_previous_kernel_state}, transaction::request::TxRequest, utils::arrays::is_empty_array, @@ -117,7 +117,7 @@ impl PrivateKernelInputsInit { mod tests { use crate::private_kernel_init::PrivateKernelInputsInit; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_READ_REQUESTS_PER_CALL, }; use dep::types::{ diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr 
b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr index cdee6d1b941..a89e4010bab 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr @@ -81,7 +81,7 @@ impl PrivateKernelInputsInner { mod tests { use crate::private_kernel_inner::PrivateKernelInputsInner; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ EMPTY_NULLIFIED_COMMITMENT, MAX_READ_REQUESTS_PER_CALL, MAX_NEW_COMMITMENTS_PER_TX, diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr index 057317729aa..0fe7d3283e9 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr @@ -1,10 +1,4 @@ use crate::common; -use dep::aztec::constants_gen::{ - EMPTY_NULLIFIED_COMMITMENT, - MAX_NEW_COMMITMENTS_PER_TX, - MAX_NEW_NULLIFIERS_PER_TX, - MAX_READ_REQUESTS_PER_TX, -}; use dep::std::unsafe; use dep::types::{ abis::{ @@ -15,6 +9,12 @@ use dep::types::{ KernelCircuitPublicInputsFinal, }, }, + constants::{ + EMPTY_NULLIFIED_COMMITMENT, + MAX_NEW_COMMITMENTS_PER_TX, + MAX_NEW_NULLIFIERS_PER_TX, + MAX_READ_REQUESTS_PER_TX, + }, hash::{ compute_commitment_nonce, compute_unique_siloed_commitment, @@ -160,7 +160,7 @@ impl PrivateKernelInputsOrdering { mod tests { use crate::private_kernel_ordering::PrivateKernelInputsOrdering; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_READ_REQUESTS_PER_TX, MAX_NEW_COMMITMENTS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml index 76525900176..669bac30a78 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml @@ -5,5 +5,4 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } types = { path = "../types" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr index 8bd199879b5..a83cda75565 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr @@ -17,6 +17,16 @@ use dep::types::{ storage_read::StorageRead, storage_update_request::StorageUpdateRequest, }, + constants::{ + EMPTY_NULLIFIED_COMMITMENT, + MAX_NEW_L2_TO_L1_MSGS_PER_CALL, + MAX_NEW_COMMITMENTS_PER_CALL, + MAX_NEW_NULLIFIERS_PER_CALL, + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_PUBLIC_DATA_READS_PER_CALL, + MAX_READ_REQUESTS_PER_CALL, + }, hash::{silo_commitment, silo_nullifier, compute_l2_to_l1_hash, accumulate_sha256}, utils::{ arrays::{array_length, array_to_bounded_vec, is_empty_array, struct_array_length, struct_array_to_bounded_vec}, @@ -26,17 +36,6 @@ use dep::types::{ }; use crate::hash::{compute_public_data_tree_index, compute_public_data_tree_value}; -use dep::aztec::constants_gen::{ - 
EMPTY_NULLIFIED_COMMITMENT, - MAX_NEW_L2_TO_L1_MSGS_PER_CALL, - MAX_NEW_COMMITMENTS_PER_CALL, - MAX_NEW_NULLIFIERS_PER_CALL, - MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_PUBLIC_DATA_READS_PER_CALL, - MAX_READ_REQUESTS_PER_CALL, -}; - // Validates inputs to the kernel circuit that are common to all invocation scenarios. pub fn validate_inputs(public_call: PublicCallData){ // Validates commons inputs for all type of kernel inputs diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr index 7a3cc9a5f0e..1769f9f87f2 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr @@ -1,15 +1,13 @@ -use dep::types::address::Address; -use dep::aztec::{ - constants_gen, - constants_gen::{GENERATOR_INDEX__PUBLIC_LEAF_INDEX}, - hash::sha256_to_field, +use dep::types::{ + address::Address, + constants::GENERATOR_INDEX__PUBLIC_LEAF_INDEX, }; pub fn compute_public_data_tree_index(contract_address: Address, storage_slot: Field) -> Field { dep::std::hash::pedersen_hash_with_separator([ contract_address.to_field(), storage_slot - ], constants_gen::GENERATOR_INDEX__PUBLIC_LEAF_INDEX) + ], GENERATOR_INDEX__PUBLIC_LEAF_INDEX) } pub fn compute_public_data_tree_value(value: Field) -> Field { diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr index 6527e18c741..550f14151e3 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr @@ -88,7 +88,7 @@ mod tests { bounded_vec::BoundedVec, }, }; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr index 46d4f485504..58c5b2ab399 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr @@ -76,7 +76,7 @@ mod tests { arrays::array_eq, }, }; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, }; diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml index d131d25b72c..a82bcabc543 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git 
a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml index a1aebfa70a6..a6bf17d1903 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml index 5eb41be2441..9afbdf6b774 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml index 9d972e291cc..f4b1e9b3557 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml index 9a1b9b54fda..0a1dcaa0012 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml @@ -5,5 +5,4 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } types = { path = "../types" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr index 54546a9e9fb..4594e4ea4c3 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr @@ -1,5 +1,5 @@ // TODO(Kev): This constant is also defined in private-kernel-lib -use dep::aztec::constants_gen::NUM_FIELDS_PER_SHA256; +use dep::types::constants::NUM_FIELDS_PER_SHA256; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::abis::constant_rollup_data::ConstantRollupData; use dep::types::mocked::AggregationObject; diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr index 410826c29c3..d784b16b5ff 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr +++ 
b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr @@ -3,7 +3,7 @@ use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; struct ConstantRollupData { // The very latest roots as at the very beginning of the entire rollup: - start_blocks_tree_snapshot : AppendOnlyTreeSnapshot, + archive_snapshot : AppendOnlyTreeSnapshot, // TODO(Sean): Some members of this struct tbd private_kernel_vk_tree_root : Field, @@ -16,7 +16,7 @@ struct ConstantRollupData { impl ConstantRollupData { pub fn eq(self, other : ConstantRollupData) -> bool { - self.start_blocks_tree_snapshot.eq(other.start_blocks_tree_snapshot) & + self.archive_snapshot.eq(other.archive_snapshot) & self.global_variables.eq(other.global_variables) & (self.private_kernel_vk_tree_root == other.private_kernel_vk_tree_root) & (self.public_kernel_vk_tree_root == other.public_kernel_vk_tree_root) & diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr index ba521f5f196..71e88760209 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use dep::types::constants::GENERATOR_INDEX__GLOBAL_VARIABLES; struct GlobalVariables { chain_id : Field, @@ -16,7 +16,7 @@ impl GlobalVariables { self.block_number, self.timestamp ], - constants_gen::GENERATOR_INDEX__GLOBAL_VARIABLES, + GENERATOR_INDEX__GLOBAL_VARIABLES, ) } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr index bd848be2463..db92815baa0 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr @@ -1,6 +1,6 @@ use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs; use dep::types::abis::membership_witness::VKMembershipWitness; -use dep::aztec::constants_gen::ROLLUP_VK_TREE_HEIGHT; +use dep::types::constants::ROLLUP_VK_TREE_HEIGHT; use dep::types::mocked::{Proof, VerificationKey}; struct PreviousRollupData{ diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr index a453771ce15..79ef5693a1d 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr @@ -8,7 +8,7 @@ use dep::types::utils::uint256::U256; use dep::types::abis::public_data_update_request::PublicDataUpdateRequest; use dep::types::abis::public_data_read::PublicDataRead; use dep::types::mocked::{AggregationObject, Proof}; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, @@ -33,7 +33,7 @@ use dep::aztec::constants_gen::{ }; use dep::types::abis::previous_kernel_data::PreviousKernelData; use dep::types::abis::membership_witness::{NullifierMembershipWitness, MembershipWitness}; -use dep::types::abis::membership_witness::BlocksTreeRootMembershipWitness; +use 
dep::types::abis::membership_witness::ArchiveRootMembershipWitness; struct BaseRollupInputs { kernel_data: [PreviousKernelData; KERNELS_PER_BASE_ROLLUP], @@ -41,7 +41,7 @@ struct BaseRollupInputs { start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot, start_contract_tree_snapshot: AppendOnlyTreeSnapshot, start_public_data_tree_root: Field, - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot, + archive_snapshot: AppendOnlyTreeSnapshot, sorted_new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP], sorted_new_nullifiers_indexes: [u32; MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP], @@ -56,7 +56,7 @@ struct BaseRollupInputs { new_public_data_update_requests_sibling_paths: [[Field; PUBLIC_DATA_TREE_HEIGHT]; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_BASE_ROLLUP], new_public_data_reads_sibling_paths: [[Field; PUBLIC_DATA_TREE_HEIGHT]; MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP], - blocks_tree_root_membership_witnesses: [BlocksTreeRootMembershipWitness; KERNELS_PER_BASE_ROLLUP], + archive_root_membership_witnesses: [ArchiveRootMembershipWitness; KERNELS_PER_BASE_ROLLUP], constants: ConstantRollupData, } @@ -113,7 +113,7 @@ impl BaseRollupInputs { let calldata_hash = BaseRollupInputs::components_compute_kernel_calldata_hash(self.kernel_data); // Perform membership checks that the notes provided exist within the historical trees data - self.perform_blocks_tree_membership_checks(); + self.perform_archive_membership_checks(); let aggregation_object = self.aggregate_proofs(); @@ -375,24 +375,24 @@ impl BaseRollupInputs { // Check that the block header used by each kernel is a member of the blocks tree --> since the block header // contains roots of all the trees this is sufficient to verify that the tree roots used by kernels are correct - fn perform_blocks_tree_membership_checks(self) { + fn perform_archive_membership_checks(self) { // For each of the block header (their block hashes), we need to do an inclusion proof // against the blocks tree root from the beginning of a rollup provided in the rollup constants - let blocks_treee_root = self.constants.start_blocks_tree_snapshot.root; + let archive_root = self.constants.archive_snapshot.root; for i in 0..KERNELS_PER_BASE_ROLLUP { // Rebuild the block hash let block_header = self.kernel_data[i].public_inputs.constants.block_header; let previous_block_hash = block_header.block.hash(); - let previous_block_hash_witness = self.blocks_tree_root_membership_witnesses[i]; + let previous_block_hash_witness = self.archive_root_membership_witnesses[i]; // Now check that the previous block hash is in the blocks tree from the beginning of the rollup components::assert_check_membership( previous_block_hash, previous_block_hash_witness.leaf_index, previous_block_hash_witness.sibling_path, - blocks_treee_root + archive_root ); } } @@ -538,11 +538,11 @@ mod tests { tests::merkle_tree_utils::{NonEmptyMerkleTree, compute_zero_hashes}, components, }; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ CONTRACT_SUBTREE_SIBLING_PATH_LENGTH, CONTRACT_TREE_HEIGHT, CONTRACT_SUBTREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, KERNELS_PER_BASE_ROLLUP, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP, @@ -554,9 +554,10 @@ mod tests { NULLIFIER_TREE_HEIGHT, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT, + NUM_FIELDS_PER_SHA256, }; use dep::types::{ - abis::membership_witness::BlocksTreeRootMembershipWitness, + abis::membership_witness::ArchiveRootMembershipWitness, abis::membership_witness::NullifierMembershipWitness, 
abis::new_contract_data::NewContractData, abis::public_data_read::PublicDataRead, @@ -731,13 +732,13 @@ mod tests { let mut start_public_data_tree = NonEmptyMerkleTree::new(self.pre_existing_public_data, [0; PUBLIC_DATA_TREE_HEIGHT], [0; PUBLIC_DATA_TREE_HEIGHT - 5], [0; 5]); let start_public_data_tree_root = start_public_data_tree.get_root(); - let start_blocks_tree = NonEmptyMerkleTree::new(self.pre_existing_blocks, [0; BLOCKS_TREE_HEIGHT], [0; BLOCKS_TREE_HEIGHT - 1], [0; 1]); - let start_blocks_tree_snapshot = AppendOnlyTreeSnapshot { - root: start_blocks_tree.get_root(), - next_available_leaf_index: start_blocks_tree.get_next_available_index() as u32, + let start_archive = NonEmptyMerkleTree::new(self.pre_existing_blocks, [0; ARCHIVE_HEIGHT], [0; ARCHIVE_HEIGHT - 1], [0; 1]); + let archive_snapshot = AppendOnlyTreeSnapshot { + root: start_archive.get_root(), + next_available_leaf_index: start_archive.get_next_available_index() as u32, }; - self.constants.start_blocks_tree_snapshot = start_blocks_tree_snapshot; + self.constants.archive_snapshot = archive_snapshot; let mut new_public_data_reads_sibling_paths: [[Field; PUBLIC_DATA_TREE_HEIGHT]; MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP] = dep::std::unsafe::zeroed(); @@ -786,7 +787,7 @@ mod tests { start_nullifier_tree_snapshot, start_contract_tree_snapshot, start_public_data_tree_root, - start_blocks_tree_snapshot, + archive_snapshot, sorted_new_nullifiers, sorted_new_nullifiers_indexes, @@ -800,14 +801,14 @@ mod tests { new_public_data_update_requests_sibling_paths, new_public_data_reads_sibling_paths, - blocks_tree_root_membership_witnesses: [ - BlocksTreeRootMembershipWitness { + archive_root_membership_witnesses: [ + ArchiveRootMembershipWitness { leaf_index: 0, - sibling_path: start_blocks_tree.get_sibling_path(0) + sibling_path: start_archive.get_sibling_path(0) }, - BlocksTreeRootMembershipWitness { + ArchiveRootMembershipWitness { leaf_index: 1, - sibling_path: start_blocks_tree.get_sibling_path(1) + sibling_path: start_archive.get_sibling_path(1) }, ], @@ -1144,10 +1145,10 @@ mod tests { } #[test(should_fail_with = "membership check failed")] - unconstrained fn compute_membership_blocks_tree_negative() { + unconstrained fn compute_membership_archive_negative() { let mut inputs = BaseRollupInputsBuilder::new().build_inputs(); - inputs.blocks_tree_root_membership_witnesses[0].sibling_path[0] = 27; + inputs.archive_root_membership_witnesses[0].sibling_path[0] = 27; let _output = inputs.base_rollup_circuit(); } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr index 4eb6d8bffd8..2a67a1acd96 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr @@ -2,7 +2,7 @@ use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInpu use dep::types::mocked::AggregationObject; use dep::types::hash::{accumulate_sha256, assert_check_membership, root_from_sibling_path}; use dep::types::utils::uint128::U128; -use dep::aztec::constants_gen::NUM_FIELDS_PER_SHA256; +use dep::types::constants::NUM_FIELDS_PER_SHA256; use crate::abis::previous_rollup_data::PreviousRollupData; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr index 
c024f1695e2..55f7d242e7e 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr @@ -1,5 +1,5 @@ use crate::abis::global_variables::GlobalVariables; -use dep::aztec::constants_gen; +use dep::types::constants::GENERATOR_INDEX__BLOCK_HASH; pub fn compute_block_hash_with_globals( globals : GlobalVariables, @@ -11,5 +11,5 @@ pub fn compute_block_hash_with_globals( let inputs = [globals.hash(), note_hash_tree_root, nullifier_tree_root, contract_tree_root, l1_to_l2_messages_tree_root, public_data_tree_root]; - dep::std::hash::pedersen_hash_with_separator(inputs, constants_gen::GENERATOR_INDEX__BLOCK_HASH) + dep::std::hash::pedersen_hash_with_separator(inputs, GENERATOR_INDEX__BLOCK_HASH) } \ No newline at end of file diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr index 175e2afc7b1..a2e29866aca 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr @@ -4,7 +4,7 @@ mod root_rollup_public_inputs; use root_rollup_public_inputs::RootRollupPublicInputs; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use dep::types::utils::uint256::U256; -use dep::aztec::constants_gen::{NUM_FIELDS_PER_SHA256,NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,L1_TO_L2_MSG_SUBTREE_HEIGHT}; +use dep::types::constants::{NUM_FIELDS_PER_SHA256,NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,L1_TO_L2_MSG_SUBTREE_HEIGHT}; use crate::{components, hash::compute_block_hash_with_globals}; use crate::merkle_tree::{calculate_subtree, calculate_empty_tree_root}; @@ -37,7 +37,7 @@ impl RootRollupInputs { ); // Build the block hash for this iteration from the tree roots and global variables - // Then insert the block into the blocks tree + // Then insert the block into the archive tree let block_hash = compute_block_hash_with_globals(left.constants.global_variables, right.end_note_hash_tree_snapshot.root, right.end_nullifier_tree_snapshot.root, @@ -45,10 +45,10 @@ impl RootRollupInputs { new_l1_to_l2_messages_tree_snapshot.root, right.end_public_data_tree_root); - // Update the blocks tree - let end_blocks_tree_snapshot = components::insert_subtree_to_snapshot_tree( - self.start_blocks_tree_snapshot, - self.new_blocks_tree_sibling_path, + // Update the archive + let end_archive_snapshot = components::insert_subtree_to_snapshot_tree( + self.start_archive_snapshot, + self.new_archive_sibling_path, 0, block_hash, 0 @@ -72,8 +72,8 @@ impl RootRollupInputs { end_public_data_tree_root : right.end_public_data_tree_root, start_l1_to_l2_messages_tree_snapshot : self.start_l1_to_l2_messages_tree_snapshot, end_l1_to_l2_messages_tree_snapshot : new_l1_to_l2_messages_tree_snapshot, - start_blocks_tree_snapshot : self.start_blocks_tree_snapshot, - end_blocks_tree_snapshot : end_blocks_tree_snapshot, + start_archive_snapshot : self.start_archive_snapshot, + end_archive_snapshot : end_archive_snapshot, calldata_hash : components::compute_calldata_hash(self.previous_rollup_data), l1_to_l2_messages_hash : compute_messages_hash(self.new_l1_to_l2_messages), } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr index 5c2fa4075ff..37f64928e8b 100644 --- 
a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr @@ -3,10 +3,10 @@ use crate::abis::previous_rollup_data::PreviousRollupData; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::abis::constant_rollup_data::ConstantRollupData; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - BLOCKS_TREE_HEIGHT + ARCHIVE_HEIGHT }; struct RootRollupInputs { @@ -20,6 +20,6 @@ struct RootRollupInputs { start_l1_to_l2_messages_tree_snapshot : AppendOnlyTreeSnapshot, // inputs required to add the block hash - start_blocks_tree_snapshot : AppendOnlyTreeSnapshot, - new_blocks_tree_sibling_path : [Field; BLOCKS_TREE_HEIGHT], + start_archive_snapshot : AppendOnlyTreeSnapshot, + new_archive_sibling_path : [Field; ARCHIVE_HEIGHT], } \ No newline at end of file diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr index 53356195357..c4c2f931d8b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr @@ -2,7 +2,7 @@ use crate::abis::nullifier_leaf_preimage::NullifierLeafPreimage; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::abis::constant_rollup_data::ConstantRollupData; use crate::abis::global_variables::GlobalVariables; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ NUM_FIELDS_PER_SHA256 }; use dep::types::mocked::AggregationObject; @@ -29,8 +29,8 @@ struct RootRollupPublicInputs { start_l1_to_l2_messages_tree_snapshot : AppendOnlyTreeSnapshot, end_l1_to_l2_messages_tree_snapshot : AppendOnlyTreeSnapshot, - start_blocks_tree_snapshot : AppendOnlyTreeSnapshot, - end_blocks_tree_snapshot : AppendOnlyTreeSnapshot, + start_archive_snapshot : AppendOnlyTreeSnapshot, + end_archive_snapshot : AppendOnlyTreeSnapshot, calldata_hash : [Field; NUM_FIELDS_PER_SHA256], l1_to_l2_messages_hash : [Field; NUM_FIELDS_PER_SHA256], diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr index 7da05d2df6d..0a095423bc6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr @@ -3,11 +3,11 @@ use crate::{ root_rollup_inputs::RootRollupInputs, }, }; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ L1_TO_L2_MSG_TREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, L1_TO_L2_MSG_SUBTREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, }; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::tests::previous_rollup_data::default_previous_rollup_data; @@ -25,10 +25,10 @@ pub fn compute_l1_l2_empty_snapshot() -> (AppendOnlyTreeSnapshot, [Field; L1_TO_ (AppendOnlyTreeSnapshot{ root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, new_l1_to_l2_messages_tree_root_sibling_path) } -pub fn compute_blocks_tree_snapshot() -> (AppendOnlyTreeSnapshot, [Field; BLOCKS_TREE_HEIGHT]) { - let zero_hashes = 
compute_zero_hashes([0; BLOCKS_TREE_HEIGHT]); - let mut sibling_path = [0; BLOCKS_TREE_HEIGHT]; - for i in 1..BLOCKS_TREE_HEIGHT { +pub fn compute_archive_snapshot() -> (AppendOnlyTreeSnapshot, [Field; ARCHIVE_HEIGHT]) { + let zero_hashes = compute_zero_hashes([0; ARCHIVE_HEIGHT]); + let mut sibling_path = [0; ARCHIVE_HEIGHT]; + for i in 1..ARCHIVE_HEIGHT { sibling_path[i] = zero_hashes[i-1]; } (AppendOnlyTreeSnapshot { root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, sibling_path) @@ -42,10 +42,10 @@ pub fn default_root_rollup_inputs() -> RootRollupInputs { inputs.new_l1_to_l2_messages_tree_root_sibling_path = l1_l2_empty_sibling_path; inputs.start_l1_to_l2_messages_tree_snapshot = l1_l2_empty_snapshot; - let (blocks_snapshot, blocks_sibling_path) = compute_blocks_tree_snapshot(); + let (blocks_snapshot, blocks_sibling_path) = compute_archive_snapshot(); - inputs.start_blocks_tree_snapshot = blocks_snapshot; - inputs.new_blocks_tree_sibling_path = blocks_sibling_path; + inputs.start_archive_snapshot = blocks_snapshot; + inputs.new_archive_sibling_path = blocks_sibling_path; inputs.previous_rollup_data = default_previous_rollup_data(); diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml index 5a7509f4318..6c8b6657f62 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml @@ -5,4 +5,3 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr index 9616da0aa8c..390c59c94aa 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr @@ -1,7 +1,7 @@ use crate::block::Block; struct BlockHeader { - blocks_tree_root : Field, + archive_root : Field, block : Block, // Private data // This is marked in the cpp code as an enhancement @@ -24,7 +24,7 @@ impl BlockHeader { self.block.nullifier_tree_root, self.block.contract_tree_root, self.block.l1_to_l2_messages_tree_root, - self.blocks_tree_root, + self.archive_root, self.block.public_data_tree_root, self.block.global_variables_hash ] diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr index 37259a7e675..012ab922e5d 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr @@ -1,6 +1,6 @@ use crate::abis::function_selector::FunctionSelector; use crate::address::{EthAddress,Address}; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__CALL_CONTEXT; struct CallContext{ msg_sender : Address, @@ -24,7 +24,7 @@ impl CallContext { self.is_delegate_call as Field, self.is_static_call as Field, self.is_contract_deployment as Field, - ], constants_gen::GENERATOR_INDEX__CALL_CONTEXT) + ], GENERATOR_INDEX__CALL_CONTEXT) } fn assert_is_zero(self) { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr index fc6a754c50d..ea323fba8fe 100644 --- 
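compute_archive_snapshot above is a generic empty-tree construction: level i's zero hash is the hash of two copies of level i-1's, the empty root is the top zero hash, and leaf 0's sibling path is a zero leaf followed by the zero hashes shifted down one level. A runnable sketch under those assumptions (sha256 stands in for the circuit's merkle hash, and compute_zero_hashes is assumed to return one zero hash per level):

```typescript
import { createHash } from 'crypto';

const hash2 = (l: Buffer, r: Buffer): Buffer =>
  createHash('sha256').update(Buffer.concat([l, r])).digest();

// zeroHashes[0] = H(0, 0); zeroHashes[i] = H(zeroHashes[i-1], zeroHashes[i-1]).
function computeZeroHashes(height: number): Buffer[] {
  const zero = Buffer.alloc(32);
  const out: Buffer[] = [hash2(zero, zero)];
  for (let i = 1; i < height; i++) out.push(hash2(out[i - 1], out[i - 1]));
  return out;
}

// Mirrors compute_archive_snapshot: the root of an empty tree of the given
// height, plus leaf 0's sibling path (the zero leaf at level 0, then the
// zero hash of each lower level).
function emptyTreeSnapshot(height: number) {
  const zeroHashes = computeZeroHashes(height);
  const siblingPath = [Buffer.alloc(32), ...zeroHashes.slice(0, height - 1)];
  return { root: zeroHashes[height - 1], nextAvailableLeafIndex: 0, siblingPath };
}
```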
a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr @@ -4,7 +4,7 @@ use crate::abis::{ public_circuit_public_inputs::PublicCircuitPublicInputs, }; use crate::address::Address; -use dep::aztec::constants_gen::{ +use crate::constants::{ GENERATOR_INDEX__CALL_STACK_ITEM, }; diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr index 6d39c700d92..e48dc871220 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr @@ -10,7 +10,7 @@ use crate::{ mocked::AggregationObject, utils::bounded_vec::BoundedVec, }; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_TX, MAX_NEW_COMMITMENTS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr index ad812b626a9..438ae6c61c6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr @@ -1,5 +1,5 @@ use crate::abis::function_selector::FunctionSelector; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__FUNCTION_DATA; struct FunctionData { // First four bytes of the abi encoding @@ -21,6 +21,6 @@ impl FunctionData { self.is_internal as Field, self.is_private as Field, self.is_constructor as Field, - ], constants_gen::GENERATOR_INDEX__FUNCTION_DATA) + ], GENERATOR_INDEX__FUNCTION_DATA) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr index 022ec5a718e..2a66a161736 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr @@ -1,5 +1,5 @@ use crate::abis::function_selector::FunctionSelector; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__FUNCTION_LEAF; struct FunctionLeafPreimage { selector : FunctionSelector, @@ -17,6 +17,6 @@ impl FunctionLeafPreimage { self.is_private as Field, self.vk_hash, self.acir_hash - ], constants_gen::GENERATOR_INDEX__FUNCTION_LEAF) + ], GENERATOR_INDEX__FUNCTION_LEAF) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr index 6b4c29dcb71..efead3d78a2 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr @@ -1,11 +1,11 @@ -use dep::aztec::constants_gen::{ +use crate::constants::{ CONTRACT_TREE_HEIGHT, FUNCTION_TREE_HEIGHT, KERNELS_PER_BASE_ROLLUP, NULLIFIER_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, ROLLUP_VK_TREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, }; struct MembershipWitness { @@ -37,9 +37,9 @@ struct NullifierMembershipWitness{ sibling_path: [Field; NULLIFIER_TREE_HEIGHT] } -struct BlocksTreeRootMembershipWitness{ +struct 
ArchiveRootMembershipWitness{ leaf_index: Field, - sibling_path: [Field; BLOCKS_TREE_HEIGHT] + sibling_path: [Field; ARCHIVE_HEIGHT] } struct ReadRequestMembershipWitness { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr index aba0cda1789..784b698da5b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr @@ -1,5 +1,5 @@ use crate::address::{Address, EthAddress}; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__CONTRACT_LEAF; struct NewContractData { contract_address: Address, @@ -36,7 +36,7 @@ impl NewContractData { self.contract_address.to_field(), self.portal_contract_address.to_field(), self.function_tree_root, - ], constants_gen::GENERATOR_INDEX__CONTRACT_LEAF) + ], GENERATOR_INDEX__CONTRACT_LEAF) } } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr index d65dc83a883..5733acef236 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr @@ -1,5 +1,5 @@ use crate::mocked::{Proof, VerificationKey}; -use dep::aztec::constants_gen::VK_TREE_HEIGHT; +use crate::constants::VK_TREE_HEIGHT; use crate::abis::kernel_circuit_public_inputs::KernelCircuitPublicInputs; //TODO(sean): left a note saying that this should not be called Previous diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr index b13e029a926..8bee165fda4 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr @@ -7,9 +7,7 @@ use crate::{ hash::NUM_FIELDS_PER_SHA256, utils::bounded_vec::BoundedVec, }; -use dep::aztec::{ - constants_gen, - constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_CALL, MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, @@ -18,7 +16,8 @@ use dep::aztec::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, RETURN_VALUES_LENGTH, - } + PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, + GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS, }; struct PrivateCircuitPublicInputs { @@ -57,7 +56,7 @@ struct PrivateCircuitPublicInputs { impl PrivateCircuitPublicInputs { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3059) : Reuse aztec-nr fn hash(self) -> Field { - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(0); fields.push(self.call_context.hash()); fields.push(self.args_hash); fields.push_array(self.return_values); @@ -78,8 +77,8 @@ impl PrivateCircuitPublicInputs { fields.push(self.chain_id); fields.push(self.version); - assert_eq(fields.len(), constants_gen::PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PrivateCircuitPublicInputs"); + assert_eq(fields.len(), PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PrivateCircuitPublicInputs"); - 
dep::std::hash::pedersen_hash_with_separator(fields.storage, constants_gen::GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS) + dep::std::hash::pedersen_hash_with_separator(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr index 2ec38255474..23414ce6530 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr @@ -9,7 +9,7 @@ use crate::abis::{ }; use crate::address::EthAddress; use crate::mocked::{Proof, VerificationKey}; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_READ_REQUESTS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr index 6310f119440..bc59e74e109 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr @@ -6,7 +6,7 @@ use crate::{ address::EthAddress, mocked::Proof, }; -use dep::aztec::constants_gen::MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL; +use crate::constants::MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL; struct PublicCallData { call_stack_item: PublicCallStackItem, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr index abcaf60e7af..69b6ab7af68 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr @@ -1,5 +1,4 @@ -use dep::aztec::constants_gen; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, MAX_NEW_COMMITMENTS_PER_CALL, @@ -7,6 +6,8 @@ use dep::aztec::constants_gen::{ MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, RETURN_VALUES_LENGTH, + GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS, + PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH }; use crate::{ abis::{ @@ -50,7 +51,7 @@ struct PublicCircuitPublicInputs{ impl PublicCircuitPublicInputs{ // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3059): Reuse aztec-nr fn hash(self) -> Field { - let mut inputs: BoundedVec = BoundedVec::new(0); + let mut inputs: BoundedVec = BoundedVec::new(0); inputs.push(self.call_context.hash()); inputs.push(self.args_hash); inputs.push_array(self.return_values); @@ -69,8 +70,8 @@ impl PublicCircuitPublicInputs{ inputs.push_array(self.block_header.to_array()); inputs.push(self.prover_address.to_field()); - assert_eq(inputs.len(), constants_gen::PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PublicCircuitPublicInputs"); + assert_eq(inputs.len(), PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PublicCircuitPublicInputs"); - dep::std::hash::pedersen_hash_with_separator(inputs.storage, constants_gen::GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS) + 
dep::std::hash::pedersen_hash_with_separator(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr index 94c8a02d087..0fefc927168 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; struct PublicDataRead { leaf_index : Field, @@ -10,7 +10,7 @@ impl PublicDataRead { dep::std::hash::pedersen_hash_with_separator([ self.leaf_index, self.value, - ], constants_gen::GENERATOR_INDEX__PUBLIC_DATA_READ) + ], GENERATOR_INDEX__PUBLIC_DATA_READ) } pub fn empty() -> Self { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr index 458d6ec9549..2e6c51ccaa7 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; struct PublicDataUpdateRequest { leaf_index : Field, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr index dc15b932bdd..a692491d3f4 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__BLOCK_HASH; // This is not in the cpp code. 
struct Block { @@ -31,6 +31,6 @@ impl Block { self.contract_tree_root, self.l1_to_l2_messages_tree_root, self.public_data_tree_root, - ], constants_gen::GENERATOR_INDEX__BLOCK_HASH) + ], GENERATOR_INDEX__BLOCK_HASH) } } \ No newline at end of file diff --git a/yarn-project/aztec-nr/aztec/src/constants_gen.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr similarity index 98% rename from yarn-project/aztec-nr/aztec/src/constants_gen.nr rename to yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr index d1d77e5b81c..f8a5c60bb62 100644 --- a/yarn-project/aztec-nr/aztec/src/constants_gen.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr @@ -73,7 +73,7 @@ global CONTRACT_SUBTREE_SIBLING_PATH_LENGTH: Field = 15; global NOTE_HASH_SUBTREE_HEIGHT: Field = 7; global NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH: Field = 25; global NULLIFIER_SUBTREE_HEIGHT: Field = 7; -global BLOCKS_TREE_HEIGHT: Field = 16; +global ARCHIVE_HEIGHT: Field = 16; global NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH: Field = 13; global L1_TO_L2_MSG_SUBTREE_HEIGHT: Field = 4; global L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH: Field = 12; @@ -82,6 +82,9 @@ global L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH: Field = 12; global FUNCTION_SELECTOR_NUM_BYTES: Field = 4; global MAPPING_SLOT_PEDERSEN_SEPARATOR: Field = 4; global NUM_FIELDS_PER_SHA256: Field = 2; +global ARGS_HASH_CHUNK_LENGTH: u32 = 32; +global ARGS_HASH_CHUNK_COUNT: u32 = 16; + // NOIR CONSTANTS - constants used only in yarn-packages/noir-contracts // Some are defined here because Noir doesn't yet support globals referencing other globals yet. diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr index d43f3d5ab2c..083122ff942 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr @@ -1,6 +1,6 @@ use crate::address::EthAddress; use crate::point::Point; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA; struct ContractDeploymentData { deployer_public_key : Point, @@ -27,6 +27,6 @@ impl ContractDeploymentData { self.function_tree_root, self.contract_address_salt, self.portal_contract_address.to_field() - ], constants_gen::GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA) + ], GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr index cc7911471f1..71de424e162 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; struct StorageRead{ storage_slot: Field, @@ -17,7 +17,7 @@ impl StorageRead { dep::std::hash::pedersen_hash_with_separator([ self.storage_slot, self.current_value, - ], constants_gen::GENERATOR_INDEX__PUBLIC_DATA_READ) + ], GENERATOR_INDEX__PUBLIC_DATA_READ) } pub fn is_empty(self) -> bool { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr index 
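The two constants added to constants.nr above cap argument hashing at ARGS_HASH_CHUNK_COUNT × ARGS_HASH_CHUNK_LENGTH = 16 × 32 = 512 fields: arguments are zero-padded into 32-field chunks, each chunk is hashed, and the 16 chunk hashes are hashed once more (implemented by hash_args in the hash.nr hunk just below). A sketch of that two-level scheme, with sha256 standing in for the pedersen hash and an illustrative generator index:

```typescript
import { createHash } from 'crypto';

const ARGS_HASH_CHUNK_LENGTH = 32;
const ARGS_HASH_CHUNK_COUNT = 16;
const GENERATOR_INDEX__FUNCTION_ARGS = 5; // illustrative value only

// Stand-in for pedersen_hash_with_separator over field elements.
function hashChunk(items: bigint[], separator: number): bigint {
  const h = createHash('sha256').update(Buffer.from([separator]));
  for (const x of items) h.update(x.toString(16).padStart(64, '0'));
  return BigInt('0x' + h.digest('hex'));
}

// Two-level args hash mirroring hash_args: empty input hashes to 0, chunks
// past the end of the args contribute a literal 0 (not a hash of zeros).
function hashArgs(args: bigint[]): bigint {
  if (args.length === 0) return 0n;
  const chunkHashes: bigint[] = [];
  for (let i = 0; i < ARGS_HASH_CHUNK_COUNT; i++) {
    const chunk = args.slice(i * ARGS_HASH_CHUNK_LENGTH, (i + 1) * ARGS_HASH_CHUNK_LENGTH);
    if (chunk.length === 0) {
      chunkHashes.push(0n);
      continue;
    }
    while (chunk.length < ARGS_HASH_CHUNK_LENGTH) chunk.push(0n); // zero-pad
    chunkHashes.push(hashChunk(chunk, GENERATOR_INDEX__FUNCTION_ARGS));
  }
  return hashChunk(chunkHashes, GENERATOR_INDEX__FUNCTION_ARGS);
}
```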
ba33f3985ba..7d1f90b0d89 100644
--- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr
+++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr
@@ -1,4 +1,4 @@
-use dep::aztec::constants_gen::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST;
+use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST;
 
 struct StorageUpdateRequest{
     storage_slot : Field,
diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr
index a331b8046be..a792107a9cd 100644
--- a/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr
+++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr
@@ -8,13 +8,70 @@ use crate::abis::function_data::FunctionData;
 use crate::utils::uint128::U128;
 use crate::utils::uint256::U256;
 use crate::utils::bounded_vec::BoundedVec;
-
-use dep::aztec::{
-    constants_gen,
-    constants_gen::{CONTRACT_TREE_HEIGHT, FUNCTION_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT},
-    hash::sha256_to_field,
+use crate::constants::{
+    ARGS_HASH_CHUNK_COUNT,
+    ARGS_HASH_CHUNK_LENGTH,
+    CONTRACT_TREE_HEIGHT,
+    FUNCTION_TREE_HEIGHT,
+    NOTE_HASH_TREE_HEIGHT,
+    GENERATOR_INDEX__SILOED_COMMITMENT,
+    GENERATOR_INDEX__OUTER_NULLIFIER,
+    GENERATOR_INDEX__VK,
+    GENERATOR_INDEX__CONSTRUCTOR,
+    GENERATOR_INDEX__PARTIAL_ADDRESS,
+    GENERATOR_INDEX__CONTRACT_ADDRESS,
+    GENERATOR_INDEX__COMMITMENT_NONCE,
+    GENERATOR_INDEX__UNIQUE_COMMITMENT,
+    GENERATOR_INDEX__FUNCTION_ARGS,
 };
+use dep::std::hash::sha256;
+
+pub fn sha256_to_field<N>(bytes_to_hash: [u8; N]) -> Field {
+    let sha256_hashed = sha256(bytes_to_hash);
+
+    // Convert it to a field element
+    let mut v = 1;
+    let mut high = 0 as Field;
+    let mut low = 0 as Field;
+
+    for i in 0..16 {
+        high = high + (sha256_hashed[15 - i] as Field) * v;
+        low = low + (sha256_hashed[16 + 15 - i] as Field) * v;
+        v = v * 256;
+    }
+
+    // Abuse that a % p + b % p = (a + b) % p and that low < p
+    let hash_in_a_field = low + high * v;
+
+    hash_in_a_field
+}
+
+pub fn hash_args<N>(args: [Field; N]) -> Field {
+    if args.len() == 0 {
+        0
+    } else {
+        let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];
+        for i in 0..ARGS_HASH_CHUNK_COUNT {
+            let mut chunk_hash = 0;
+            let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;
+            if start_chunk_index < (args.len() as u32) {
+                let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];
+                for j in 0..ARGS_HASH_CHUNK_LENGTH {
+                    let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;
+                    if item_index < (args.len() as u32) {
+                        chunk_args[j] = args[item_index];
+                    }
+                }
+                chunk_hash = dep::std::hash::pedersen_hash_with_separator(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);
+            }
+            chunks_hashes[i] = chunk_hash;
+        }
+        dep::std::hash::pedersen_hash_with_separator(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)
+    }
+}
+
+
 // Checks that `value` is a member of a merkle tree with root `root` at position `index`
 // The witness being the `sibling_path`
 pub fn assert_check_membership<N>(value : Field, index : Field, sibling_path : [Field; N], root : Field) {
@@ -94,14 +151,14 @@ pub fn silo_commitment(address : Address, inner_commitment : Field) -> Field {
     dep::std::hash::pedersen_hash_with_separator([
         address.to_field(),
         inner_commitment,
-    ], constants_gen::GENERATOR_INDEX__SILOED_COMMITMENT)
+    ], GENERATOR_INDEX__SILOED_COMMITMENT)
 }
 
 pub fn silo_nullifier(address : Address, nullifier : Field) -> Field {
     dep::std::hash::pedersen_hash_with_separator([
         address.to_field(),
         nullifier,
-    ], constants_gen::GENERATOR_INDEX__OUTER_NULLIFIER)
+    ], GENERATOR_INDEX__OUTER_NULLIFIER)
 }
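The byte-to-field conversion moved into hash.nr above is easy to mirror outside the circuit. This runnable sketch splits the 32-byte digest into two big-endian 128-bit halves and recombines them as low + high · 2^128 modulo the field (using the BN254 scalar modulus is an assumption about the deployment; the diff does not state the field):

```typescript
import { createHash } from 'crypto';

// BN254 scalar field modulus (assumed; not stated in this diff).
const FIELD_MODULUS = 21888242871839275222246405745257275088548364400416034343698204186575808495617n;

// Mirrors sha256_to_field: digest bytes 0..15 form the high 128-bit half,
// bytes 16..31 the low half; the result is low + high * 2^128 (mod p).
function sha256ToField(bytesToHash: Uint8Array): bigint {
  const digest = createHash('sha256').update(bytesToHash).digest();
  let high = 0n;
  let low = 0n;
  for (let i = 0; i < 16; i++) {
    high = (high << 8n) | BigInt(digest[i]);
    low = (low << 8n) | BigInt(digest[16 + i]);
  }
  // low < 2^128 < p, so the sum reduces cleanly mod p, as the Noir comment notes.
  return (low + high * (1n << 128n)) % FIELD_MODULUS;
}
```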
 fn merkle_hash(left : Field, right : Field) -> Field {
@@ -112,7 +169,7 @@ pub fn stdlib_recursion_verification_key_compress_native_vk(_vk : VerificationKe
     // Original cpp code
     // stdlib::recursion::verification_key::compress_native(private_call.vk, GeneratorIndex::VK);
     // The above cpp method is only ever called on verification key, so it has been special cased here
-    let _hash_index = constants_gen::GENERATOR_INDEX__VK;
+    let _hash_index = GENERATOR_INDEX__VK;
 
     0
 }
@@ -143,7 +200,7 @@ pub fn compute_constructor_hash(function_data : FunctionData, args_hash : Field,
         function_data_hash,
         args_hash,
         constructor_vk_hash
-    ], constants_gen::GENERATOR_INDEX__CONSTRUCTOR)
+    ], GENERATOR_INDEX__CONSTRUCTOR)
 }
 
 // sha256 hash is stored in two fields to accommodate all 256-bits of the hash
@@ -201,7 +258,7 @@ pub fn compute_partial_address(contract_address_salt : Field, function_tree_root
         contract_address_salt,
         function_tree_root,
         constructor_hash
-    ],constants_gen::GENERATOR_INDEX__PARTIAL_ADDRESS)
+    ], GENERATOR_INDEX__PARTIAL_ADDRESS)
 }
 
 pub fn compute_contract_address_from_partial(point : Point, partial_address : Field) -> Address {
@@ -209,7 +266,7 @@ pub fn compute_contract_address_from_partial(point : Point, partial_address : Fi
         point.x,
         point.y,
         partial_address
-    ],constants_gen::GENERATOR_INDEX__CONTRACT_ADDRESS);
+    ], GENERATOR_INDEX__CONTRACT_ADDRESS);
 
     Address::from_field(field)
 }
@@ -217,14 +274,14 @@ pub fn compute_commitment_nonce(first_nullifier : Field, commitment_index : Fiel
     dep::std::hash::pedersen_hash_with_separator([
         first_nullifier,
         commitment_index
-    ], constants_gen::GENERATOR_INDEX__COMMITMENT_NONCE)
+    ], GENERATOR_INDEX__COMMITMENT_NONCE)
 }
 
 pub fn compute_unique_siloed_commitment(nonce: Field, siloed_commitment: Field) -> Field {
     dep::std::hash::pedersen_hash_with_separator([
         nonce,
         siloed_commitment
-    ], constants_gen::GENERATOR_INDEX__UNIQUE_COMMITMENT)
+    ], GENERATOR_INDEX__UNIQUE_COMMITMENT)
 }
 
 pub fn compute_unique_siloed_commitments<N>(first_nullifier: Field, siloed_commitments: [Field; N]) -> [Field; N] {
diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr
index 16c2115161e..d3140b40e9b 100644
--- a/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr
+++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr
@@ -7,9 +7,8 @@ use crate::abis::function_data::FunctionData;
 use crate::abis::function_leaf_preimage::FunctionLeafPreimage;
 use crate::contrakt::deployment_data::ContractDeploymentData;
 use crate::abis::function_selector::FunctionSelector;
-use crate::hash::compute_l2_to_l1_hash;
+use crate::hash::{compute_l2_to_l1_hash, sha256_to_field};
 use crate::abis::call_stack_item::PublicCallStackItem;
-use dep::aztec::hash::sha256_to_field;
 use crate::abis::public_circuit_public_inputs::PublicCircuitPublicInputs;
 
 #[test]
diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr
index 8a33e4f28eb..90c1376598d 100644
--- a/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr
+++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr
@@ -8,6 +8,7 @@ mod contrakt;
 mod transaction;
 mod abis;
 mod block;
+mod constants;
 mod mocked;
 mod hash;
diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr
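Read together, the helpers in the hash.nr hunk form a derivation pipeline: constructor hash, then partial address (adding the salt and function tree root), then contract address (adding the deployer public key point). A shape-only sketch of that chain, with a sha256 stand-in for the pedersen hash and illustrative generator indices (the real values live in constants.nr):

```typescript
import { createHash } from 'crypto';

type Field = bigint;

// sha256 stand-in for the circuit's pedersen_hash_with_separator.
const ph = (inputs: Field[], index: number): Field => {
  const h = createHash('sha256').update(Buffer.from([index]));
  for (const x of inputs) h.update(x.toString(16).padStart(64, '0'));
  return BigInt('0x' + h.digest('hex'));
};

// Illustrative generator indices only.
const CONSTRUCTOR = 1, PARTIAL_ADDRESS = 2, CONTRACT_ADDRESS = 3;

// constructor hash -> partial address -> contract address, mirroring
// compute_constructor_hash, compute_partial_address, and
// compute_contract_address_from_partial above.
function deriveContractAddress(
  functionDataHash: Field, argsHash: Field, constructorVkHash: Field,
  salt: Field, functionTreeRoot: Field, pubKey: { x: Field; y: Field },
): Field {
  const constructorHash = ph([functionDataHash, argsHash, constructorVkHash], CONSTRUCTOR);
  const partialAddress = ph([salt, functionTreeRoot, constructorHash], PARTIAL_ADDRESS);
  return ph([pubKey.x, pubKey.y, partialAddress], CONTRACT_ADDRESS);
}
```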
b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr index fca591a7fa6..89c5da01d76 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr @@ -15,7 +15,7 @@ global MSG_SENDER = Address { inner: 27 }; global DEPLOYER_PUBLIC_KEY = Point { x: 123456789, y: 123456789 }; global BLOCK_HEADER = BlockHeader { - blocks_tree_root: 0, + archive_root: 0, block: Block { note_hash_tree_root: fixtures::note_hash_tree::ROOT, nullifier_tree_root: 0, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr index 446fa2e5e70..97a57f44976 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr @@ -1,10 +1,8 @@ use crate::abis::membership_witness::ReadRequestMembershipWitness; use crate::tests::fixtures; use crate::utils::bounded_vec::BoundedVec; -use dep::aztec::{ - constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_CALL, - }, }; pub fn generate_read_requests(how_many: Field) -> ( diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr index d41008a7ad4..9c4c26dffc6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr @@ -19,15 +19,13 @@ use crate::{ }, transaction::context::TxContext, }; -use dep::aztec::{ - constants_gen::{ +use crate::constants::{ EMPTY_NULLIFIED_COMMITMENT, MAX_NEW_COMMITMENTS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, VK_TREE_HEIGHT, - }, }; struct PreviousKernelDataBuilder { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr index b03552e2df6..0cb9b217f54 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr @@ -26,12 +26,10 @@ use crate::{ bounded_vec::BoundedVec, }, }; -use dep::aztec::{ - constants_gen::{ +use crate::constants::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_READ_REQUESTS_PER_CALL, - }, }; struct PrivateCallDataBuilder { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr index f000639f3df..e412af64d98 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr @@ -6,16 +6,14 @@ use crate::{ private_circuit_public_inputs::PrivateCircuitPublicInputs, }, contrakt::deployment_data::ContractDeploymentData, - hash::{compute_constructor_hash, NUM_FIELDS_PER_SHA256}, + hash::{compute_constructor_hash, NUM_FIELDS_PER_SHA256, hash_args}, 
tests::{ fixtures, testing_harness::build_contract_deployment_data, }, utils::bounded_vec::BoundedVec, }; -use dep::aztec::{ - abi::hash_args, - constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_CALL, MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, @@ -24,7 +22,6 @@ use dep::aztec::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, RETURN_VALUES_LENGTH, - } }; struct PrivateCircuitPublicInputsBuilder { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr index fdf4678b161..01ee980b65b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr @@ -19,7 +19,7 @@ use crate::{ }, utils::bounded_vec::BoundedVec, }; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr index 7f1de0beee6..34d3df007c4 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr @@ -13,7 +13,7 @@ use crate::{ tests::fixtures, utils::bounded_vec::BoundedVec, }; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr index 3032020b61b..3715951ca78 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__TX_CONTEXT; use crate::contrakt::deployment_data::ContractDeploymentData; struct TxContext { @@ -12,7 +12,6 @@ struct TxContext { version : Field, } - impl TxContext { fn hash(self) -> Field { dep::std::hash::pedersen_hash_with_separator([ @@ -22,6 +21,6 @@ impl TxContext { self.contract_deployment_data.hash(), self.chain_id, self.version, - ], constants_gen::GENERATOR_INDEX__TX_CONTEXT) + ], GENERATOR_INDEX__TX_CONTEXT) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr index 8bd6a5d963e..89d48366778 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__TX_REQUEST; use crate::address::Address; use crate::transaction::context::TxContext; use crate::abis::function_data::FunctionData; @@ -17,6 +17,6 @@ impl TxRequest { self.function_data.hash(), self.args_hash, self.tx_context.hash(), - ], constants_gen::GENERATOR_INDEX__TX_REQUEST) + ], GENERATOR_INDEX__TX_REQUEST) } } diff --git 
a/yarn-project/noir-protocol-circuits/src/type_conversion.ts b/yarn-project/noir-protocol-circuits/src/type_conversion.ts index 9eb01a54516..ebcf5bd5856 100644 --- a/yarn-project/noir-protocol-circuits/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits/src/type_conversion.ts @@ -1,8 +1,8 @@ import { + ARCHIVE_HEIGHT, AggregationObject, AppendOnlyTreeSnapshot, AztecAddress, - BLOCKS_TREE_HEIGHT, BaseOrMergeRollupPublicInputs, BaseRollupInputs, BlockHeader, @@ -114,8 +114,8 @@ import { StorageUpdateRequest as StorageUpdateRequestNoir, } from './types/public_kernel_private_previous_types.js'; import { + ArchiveRootMembershipWitness as ArchiveRootMembershipWitnessNoir, BaseRollupInputs as BaseRollupInputsNoir, - BlocksTreeRootMembershipWitness as BlocksTreeRootMembershipWitnessNoir, NullifierLeafPreimage as NullifierLeafPreimageNoir, NullifierMembershipWitness as NullifierMembershipWitnessNoir, } from './types/rollup_base_types.js'; @@ -442,7 +442,7 @@ export function mapCallRequestToNoir(callRequest: CallRequest): CallRequestNoir */ export function mapBlockHeaderToNoir(blockHeader: BlockHeader): BlockHeaderNoir { return { - blocks_tree_root: mapFieldToNoir(blockHeader.blocksTreeRoot), + archive_root: mapFieldToNoir(blockHeader.archiveRoot), block: { note_hash_tree_root: mapFieldToNoir(blockHeader.noteHashTreeRoot), nullifier_tree_root: mapFieldToNoir(blockHeader.nullifierTreeRoot), @@ -466,7 +466,7 @@ export function mapBlockHeaderFromNoir(blockHeader: BlockHeaderNoir): BlockHeade mapFieldFromNoir(blockHeader.block.nullifier_tree_root), mapFieldFromNoir(blockHeader.block.contract_tree_root), mapFieldFromNoir(blockHeader.block.l1_to_l2_messages_tree_root), - mapFieldFromNoir(blockHeader.blocks_tree_root), + mapFieldFromNoir(blockHeader.archive_root), mapFieldFromNoir(blockHeader.private_kernel_vk_tree_root), mapFieldFromNoir(blockHeader.block.public_data_tree_root), mapFieldFromNoir(blockHeader.block.global_variables_hash), @@ -1030,7 +1030,7 @@ export function mapGlobalVariablesFromNoir(globalVariables: GlobalVariablesNoir) */ export function mapConstantRollupDataToNoir(constantRollupData: ConstantRollupData): ConstantRollupDataNoir { return { - start_blocks_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(constantRollupData.startBlocksTreeSnapshot), + archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(constantRollupData.archiveSnapshot), private_kernel_vk_tree_root: mapFieldToNoir(constantRollupData.privateKernelVkTreeRoot), public_kernel_vk_tree_root: mapFieldToNoir(constantRollupData.publicKernelVkTreeRoot), base_rollup_vk_hash: mapFieldToNoir(constantRollupData.baseRollupVkHash), @@ -1074,7 +1074,7 @@ export function mapPublicCircuitPublicInputsToNoir( */ export function mapConstantRollupDataFromNoir(constantRollupData: ConstantRollupDataNoir): ConstantRollupData { return new ConstantRollupData( - mapAppendOnlyTreeSnapshotFromNoir(constantRollupData.start_blocks_tree_snapshot), + mapAppendOnlyTreeSnapshotFromNoir(constantRollupData.archive_snapshot), mapFieldFromNoir(constantRollupData.private_kernel_vk_tree_root), mapFieldFromNoir(constantRollupData.public_kernel_vk_tree_root), mapFieldFromNoir(constantRollupData.base_rollup_vk_hash), @@ -1240,8 +1240,8 @@ export function mapRootRollupInputsToNoir(rootRollupInputs: RootRollupInputs): R start_l1_to_l2_messages_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir( rootRollupInputs.startL1ToL2MessagesTreeSnapshot, ), - start_blocks_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(rootRollupInputs.startBlocksTreeSnapshot), - 
new_blocks_tree_sibling_path: mapTuple(rootRollupInputs.newBlocksTreeSiblingPath, mapFieldToNoir), + start_archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(rootRollupInputs.startArchiveSnapshot), + new_archive_sibling_path: mapTuple(rootRollupInputs.newArchiveSiblingPath, mapFieldToNoir), }; } @@ -1266,8 +1266,8 @@ export function mapRootRollupPublicInputsFromNoir( mapFieldFromNoir(rootRollupPublicInputs.end_public_data_tree_root), mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_l1_to_l2_messages_tree_snapshot), mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_l1_to_l2_messages_tree_snapshot), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_blocks_tree_snapshot), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_blocks_tree_snapshot), + mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_archive_snapshot), + mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_archive_snapshot), mapTupleFromNoir(rootRollupPublicInputs.calldata_hash, 2, mapFieldFromNoir), mapTupleFromNoir(rootRollupPublicInputs.l1_to_l2_messages_hash, 2, mapFieldFromNoir), ); @@ -1293,9 +1293,9 @@ export function mapNullifierLeafPreimageToNoir( nullifierLeafPreimage: NullifierLeafPreimage, ): NullifierLeafPreimageNoir { return { - leaf_value: mapFieldToNoir(nullifierLeafPreimage.leafValue), - next_value: mapFieldToNoir(nullifierLeafPreimage.nextValue), - next_index: mapFieldToNoir(new Fr(nullifierLeafPreimage.nextIndex)), + leaf_value: mapFieldToNoir(nullifierLeafPreimage.nullifier), + next_value: mapFieldToNoir(nullifierLeafPreimage.nextNullifier), + next_index: mapNumberToNoir(Number(nullifierLeafPreimage.nextIndex)), }; } @@ -1318,9 +1318,9 @@ export function mapNullifierMembershipWitnessToNoir( * @param membershipWitness - The membership witness. * @returns The noir membership witness. 
*/ -export function mapBlocksTreeRootMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): BlocksTreeRootMembershipWitnessNoir { +export function mapArchiveRootMembershipWitnessToNoir( + membershipWitness: MembershipWitness, +): ArchiveRootMembershipWitnessNoir { return { leaf_index: membershipWitness.leafIndex.toString(), sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), @@ -1339,7 +1339,7 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI start_nullifier_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.startNullifierTreeSnapshot), start_contract_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.startContractTreeSnapshot), start_public_data_tree_root: mapFieldToNoir(inputs.startPublicDataTreeRoot), - start_blocks_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.startBlocksTreeSnapshot), + archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.archiveSnapshot), sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapFieldToNoir), sorted_new_nullifiers_indexes: mapTuple(inputs.sortednewNullifiersIndexes, mapNumberToNoir), low_nullifier_leaf_preimages: mapTuple(inputs.lowNullifierLeafPreimages, mapNullifierLeafPreimageToNoir), @@ -1358,9 +1358,9 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI inputs.newPublicDataReadsSiblingPaths, (siblingPath: Tuple) => mapTuple(siblingPath, mapFieldToNoir), ), - blocks_tree_root_membership_witnesses: mapTuple( - inputs.blocksTreeRootMembershipWitnesses, - mapBlocksTreeRootMembershipWitnessToNoir, + archive_root_membership_witnesses: mapTuple( + inputs.archiveRootMembershipWitnesses, + mapArchiveRootMembershipWitnessToNoir, ), constants: mapConstantRollupDataToNoir(inputs.constants), }; diff --git a/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts b/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts index d5beb0d9ab4..73ae45036ae 100644 --- a/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts @@ -75,7 +75,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts b/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts index 2666c692e66..1c9123613e5 100644 --- a/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts b/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts index a84711e7fea..d48a7eb892b 100644 --- a/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts 
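The renamed witness mapper above leans on two conventions worth noting: leaf indices cross the Noir boundary as decimal strings, and fixed-length arrays are mapped element-wise while their length is preserved in the type. A sketch of how a mapTuple-style helper can keep that length in TypeScript's type system (the FixedLengthArray shape here is an assumption for illustration, not the repo's actual definition):

```typescript
// Assumed shape: a plain array whose length is tracked at the type level.
type FixedLengthArray<T, N extends number> = T[] & { length: N };

// Map over a fixed-length array without losing its compile-time length.
function mapTuple<T, U, N extends number>(
  tuple: FixedLengthArray<T, N>,
  fn: (item: T) => U,
): FixedLengthArray<U, N> {
  return tuple.map(fn) as FixedLengthArray<U, N>;
}

// Usage in the spirit of mapArchiveRootMembershipWitnessToNoir:
//   sibling_path: mapTuple(witness.siblingPath, mapFieldToNoir)
```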
b/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts index 7690aca56f5..2d92f904cb5 100644 --- a/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts b/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts index 739dce88379..8a974060677 100644 --- a/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts b/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts index 1c880cb2cdf..9fe5308c761 100644 --- a/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } @@ -163,7 +163,7 @@ export interface NullifierMembershipWitness { sibling_path: FixedLengthArray; } -export interface BlocksTreeRootMembershipWitness { +export interface ArchiveRootMembershipWitness { leaf_index: Field; sibling_path: FixedLengthArray; } @@ -176,7 +176,7 @@ export interface GlobalVariables { } export interface ConstantRollupData { - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; private_kernel_vk_tree_root: Field; public_kernel_vk_tree_root: Field; base_rollup_vk_hash: Field; @@ -190,7 +190,7 @@ export interface BaseRollupInputs { start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot; start_contract_tree_snapshot: AppendOnlyTreeSnapshot; start_public_data_tree_root: Field; - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; sorted_new_nullifiers: FixedLengthArray; sorted_new_nullifiers_indexes: FixedLengthArray; low_nullifier_leaf_preimages: FixedLengthArray; @@ -200,7 +200,7 @@ export interface BaseRollupInputs { new_contracts_subtree_sibling_path: FixedLengthArray; new_public_data_update_requests_sibling_paths: FixedLengthArray, 32>; new_public_data_reads_sibling_paths: FixedLengthArray, 32>; - blocks_tree_root_membership_witnesses: FixedLengthArray; + archive_root_membership_witnesses: FixedLengthArray; constants: ConstantRollupData; } diff --git a/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts b/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts index 233624ab34d..e6a4f760ab7 100644 --- a/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts @@ -22,7 +22,7 @@ export interface GlobalVariables { } export interface ConstantRollupData { - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; private_kernel_vk_tree_root: Field; public_kernel_vk_tree_root: Field; 
base_rollup_vk_hash: Field; diff --git a/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts b/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts index 83fd8b3c352..b1ad54abde9 100644 --- a/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts @@ -22,7 +22,7 @@ export interface GlobalVariables { } export interface ConstantRollupData { - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; private_kernel_vk_tree_root: Field; public_kernel_vk_tree_root: Field; base_rollup_vk_hash: Field; @@ -68,8 +68,8 @@ export interface RootRollupInputs { new_l1_to_l2_messages: FixedLengthArray; new_l1_to_l2_messages_tree_root_sibling_path: FixedLengthArray; start_l1_to_l2_messages_tree_snapshot: AppendOnlyTreeSnapshot; - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; - new_blocks_tree_sibling_path: FixedLengthArray; + start_archive_snapshot: AppendOnlyTreeSnapshot; + new_archive_sibling_path: FixedLengthArray; } export interface RootRollupPublicInputs { @@ -85,8 +85,8 @@ export interface RootRollupPublicInputs { end_public_data_tree_root: Field; start_l1_to_l2_messages_tree_snapshot: AppendOnlyTreeSnapshot; end_l1_to_l2_messages_tree_snapshot: AppendOnlyTreeSnapshot; - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; - end_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + start_archive_snapshot: AppendOnlyTreeSnapshot; + end_archive_snapshot: AppendOnlyTreeSnapshot; calldata_hash: FixedLengthArray; l1_to_l2_messages_hash: FixedLengthArray; } diff --git a/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh b/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh index 1fa02e92d91..b8d6c301519 100755 --- a/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh +++ b/yarn-project/p2p-bootstrap/terraform/servicediscovery-drain.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash [ $# -ne 1 ] && echo "Usage: $0 " && exit 1 diff --git a/yarn-project/package.json b/yarn-project/package.json index c123e44e1b3..089ce942376 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -64,7 +64,7 @@ "resolutions": { "ts-jest@^29.1.0": "patch:ts-jest@npm%3A29.1.1#./.yarn/patches/ts-jest-npm-29.1.1-04e888e48e.patch", "ts-jest@^29.1.1": "patch:ts-jest@npm%3A29.1.1#./.yarn/patches/ts-jest-npm-29.1.1-04e888e48e.patch", - "@aztec/bb.js": "portal:../barretenberg/ts/package", + "@aztec/bb.js": "portal:../barretenberg/ts", "@noir-lang/acvm_js": "portal:../noir/packages/acvm_js", "@noir-lang/backend_barretenberg": "portal:../noir/packages/backend_barretenberg", "@noir-lang/types": "portal:../noir/packages/types", diff --git a/yarn-project/pxe/src/bin/index.ts b/yarn-project/pxe/src/bin/index.ts index de9e3684ed2..43e49dd8271 100644 --- a/yarn-project/pxe/src/bin/index.ts +++ b/yarn-project/pxe/src/bin/index.ts @@ -1,4 +1,5 @@ #!/usr/bin/env -S node --no-warnings +import { init } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { createAztecNodeClient } from '@aztec/types'; @@ -16,6 +17,8 @@ const logger = createDebugLogger('aztec:pxe_service'); async function main() { logger.info(`Setting up PXE...`); + await init(); + const pxeConfig = getPXEServiceConfig(); const nodeRpcClient = createAztecNodeClient(AZTEC_NODE_URL); const pxeService = await createPXEService(nodeRpcClient, pxeConfig); diff --git a/yarn-project/pxe/src/database/memory_db.ts 
b/yarn-project/pxe/src/database/memory_db.ts index b0562f0ceb6..4f2849b42e5 100644 --- a/yarn-project/pxe/src/database/memory_db.ts +++ b/yarn-project/pxe/src/database/memory_db.ts @@ -128,7 +128,7 @@ export class MemoryDB extends MemoryContractDatabase implements Database { roots[MerkleTreeId.NULLIFIER_TREE], roots[MerkleTreeId.CONTRACT_TREE], roots[MerkleTreeId.L1_TO_L2_MESSAGES_TREE], - roots[MerkleTreeId.BLOCKS_TREE], + roots[MerkleTreeId.ARCHIVE], Fr.ZERO, // todo: private kernel vk tree root roots[MerkleTreeId.PUBLIC_DATA_TREE], this.globalVariablesHash, @@ -142,7 +142,7 @@ export class MemoryDB extends MemoryContractDatabase implements Database { [MerkleTreeId.NULLIFIER_TREE]: blockHeader.nullifierTreeRoot, [MerkleTreeId.CONTRACT_TREE]: blockHeader.contractTreeRoot, [MerkleTreeId.L1_TO_L2_MESSAGES_TREE]: blockHeader.l1ToL2MessagesTreeRoot, - [MerkleTreeId.BLOCKS_TREE]: blockHeader.blocksTreeRoot, + [MerkleTreeId.ARCHIVE]: blockHeader.archiveRoot, [MerkleTreeId.PUBLIC_DATA_TREE]: blockHeader.publicDataTreeRoot, }); } diff --git a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts index 665fd47b648..0cde8a8b396 100644 --- a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts +++ b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts @@ -1,5 +1,6 @@ import { FunctionSelector } from '@aztec/circuits.js'; import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; import { JsonRpcServer } from '@aztec/foundation/json-rpc/server'; import { @@ -24,8 +25,6 @@ import { import http from 'http'; import { foundry } from 'viem/chains'; -import { EthAddress } from '../index.js'; - export const localAnvil = foundry; /** diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 1bb9a289c74..7e495c173ea 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -147,8 +147,8 @@ export class SimulatorOracle implements DBOracle { return (await this.stateInfoProvider.getNullifierTreeSiblingPath(blockNumber, leafIndex)).toFieldArray(); case MerkleTreeId.NOTE_HASH_TREE: return (await this.stateInfoProvider.getNoteHashSiblingPath(blockNumber, leafIndex)).toFieldArray(); - case MerkleTreeId.BLOCKS_TREE: - return (await this.stateInfoProvider.getBlocksTreeSiblingPath(blockNumber, leafIndex)).toFieldArray(); + case MerkleTreeId.ARCHIVE: + return (await this.stateInfoProvider.getArchiveSiblingPath(blockNumber, leafIndex)).toFieldArray(); case MerkleTreeId.PUBLIC_DATA_TREE: return (await this.stateInfoProvider.getPublicDataTreeSiblingPath(blockNumber, leafIndex)).toFieldArray(); default: diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index 6ee3071712c..95893f10810 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -24,7 +24,7 @@ describe('Synchronizer', () => { [MerkleTreeId.NULLIFIER_TREE]: blockHeader.nullifierTreeRoot, [MerkleTreeId.PUBLIC_DATA_TREE]: blockHeader.publicDataTreeRoot, [MerkleTreeId.L1_TO_L2_MESSAGES_TREE]: blockHeader.l1ToL2MessagesTreeRoot, - [MerkleTreeId.BLOCKS_TREE]: blockHeader.blocksTreeRoot, + [MerkleTreeId.ARCHIVE]: blockHeader.archiveRoot, }; aztecNode = mock(); diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts 
b/yarn-project/pxe/src/synchronizer/synchronizer.ts index 98dfd30c7e6..1c557386c18 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -206,7 +206,7 @@ export class Synchronizer { block.endNullifierTreeSnapshot.root, block.endContractTreeSnapshot.root, block.endL1ToL2MessagesTreeSnapshot.root, - block.endBlocksTreeSnapshot.root, + block.endArchiveSnapshot.root, Fr.ZERO, // todo: private kernel vk tree root block.endPublicDataTreeRoot, globalsHash, diff --git a/yarn-project/scripts/run_script.sh b/yarn-project/scripts/run_script.sh index a3347411e85..a06cf4eb42e 100755 --- a/yarn-project/scripts/run_script.sh +++ b/yarn-project/scripts/run_script.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash # Downloads the image that contains the built scripts package and executes the given command in it. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu diff --git a/yarn-project/scripts/version_packages.sh b/yarn-project/scripts/version_packages.sh index a708cb7a4ed..bf631bd8ac1 100755 --- a/yarn-project/scripts/version_packages.sh +++ b/yarn-project/scripts/version_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu if [ -n "$COMMIT_TAG" ]; then diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts index 808e02f6cae..5909afce8b3 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts @@ -143,7 +143,7 @@ describe('sequencer/solo_block_builder', () => { await expectsDb.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGES_TREE, asBuffer); }; - const updateBlocksTree = async () => { + const updateArchive = async () => { const blockHash = computeBlockHashWithGlobals( globalVariables, rootRollupOutput.endNoteHashTreeSnapshot.root, @@ -152,7 +152,7 @@ describe('sequencer/solo_block_builder', () => { rootRollupOutput.endL1ToL2MessagesTreeSnapshot.root, rootRollupOutput.endPublicDataTreeRoot, ); - await expectsDb.appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await expectsDb.appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); }; const getTreeSnapshot = async (tree: MerkleTreeId) => { @@ -204,8 +204,8 @@ describe('sequencer/solo_block_builder', () => { // Calculate block hash rootRollupOutput.globalVariables = globalVariables; - await updateBlocksTree(); - rootRollupOutput.endBlocksTreeSnapshot = await getTreeSnapshot(MerkleTreeId.BLOCKS_TREE); + await updateArchive(); + rootRollupOutput.endArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE); const txs = [...txsLeft, ...txsRight]; @@ -235,8 +235,8 @@ describe('sequencer/solo_block_builder', () => { endPublicDataTreeRoot: rootRollupOutput.endPublicDataTreeRoot, startL1ToL2MessagesTreeSnapshot: rootRollupOutput.startL1ToL2MessagesTreeSnapshot, endL1ToL2MessagesTreeSnapshot: rootRollupOutput.endL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot: rootRollupOutput.startBlocksTreeSnapshot, - endBlocksTreeSnapshot: rootRollupOutput.endBlocksTreeSnapshot, + startArchiveSnapshot: rootRollupOutput.startArchiveSnapshot, + endArchiveSnapshot: rootRollupOutput.endArchiveSnapshot, newCommitments, newNullifiers, newContracts, diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts index 7f70eb98b1c..7b19203826f 
100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts @@ -1,6 +1,6 @@ import { + ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, - BLOCKS_TREE_HEIGHT, BaseOrMergeRollupPublicInputs, BaseRollupInputs, CONTRACT_SUBTREE_HEIGHT, @@ -58,11 +58,9 @@ import { RollupProver } from '../prover/index.js'; import { ProcessedTx } from '../sequencer/processed_tx.js'; import { RollupSimulator } from '../simulator/index.js'; import { BlockBuilder } from './index.js'; -import { AllowedTreeNames, OutputWithTreeSnapshot } from './types.js'; +import { AllowedTreeNames, OutputWithTreeSnapshot, TreeNames } from './types.js'; const frToBigInt = (fr: Fr) => toBigIntBE(fr.toBuffer()); -const bigintToFr = (num: bigint) => new Fr(num); -const bigintToNum = (num: bigint) => Number(num); // Denotes fields that are not used now, but will be in the future const FUTURE_FR = new Fr(0n); @@ -103,7 +101,7 @@ export class SoloBlockBuilder implements BlockBuilder { startContractTreeSnapshot, startPublicDataTreeSnapshot, startL1ToL2MessageTreeSnapshot, - startBlocksTreeSnapshot, + startArchiveSnapshot, ] = await Promise.all( [ MerkleTreeId.NOTE_HASH_TREE, @@ -111,7 +109,7 @@ export class SoloBlockBuilder implements BlockBuilder { MerkleTreeId.CONTRACT_TREE, MerkleTreeId.PUBLIC_DATA_TREE, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, - MerkleTreeId.BLOCKS_TREE, + MerkleTreeId.ARCHIVE, ].map(tree => this.getTreeSnapshot(tree)), ); @@ -127,7 +125,7 @@ export class SoloBlockBuilder implements BlockBuilder { endContractTreeSnapshot, endPublicDataTreeRoot, endL1ToL2MessagesTreeSnapshot, - endBlocksTreeSnapshot, + endArchiveSnapshot, } = circuitsOutput; // Collect all new nullifiers, commitments, and contracts from all txs in this block @@ -167,8 +165,8 @@ export class SoloBlockBuilder implements BlockBuilder { endPublicDataTreeRoot, startL1ToL2MessagesTreeSnapshot: startL1ToL2MessageTreeSnapshot, endL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot, - endBlocksTreeSnapshot, + startArchiveSnapshot, + endArchiveSnapshot, newCommitments, newNullifiers, newL2ToL1Msgs, @@ -313,17 +311,17 @@ export class SoloBlockBuilder implements BlockBuilder { this.debug(`Updating and validating root trees`); const globalVariablesHash = computeGlobalsHash(left[0].constants.globalVariables); await this.db.updateLatestGlobalVariablesHash(globalVariablesHash); - await this.db.updateBlocksTree(globalVariablesHash); + await this.db.updateArchive(globalVariablesHash); await this.validateRootOutput(rootOutput); return [rootOutput, rootProof]; } - async updateBlocksTree(globalVariables: GlobalVariables) { + async updateArchive(globalVariables: GlobalVariables) { // Calculate the block hash and add it to the historical block hashes tree const blockHash = await this.calculateBlockHash(globalVariables); - await this.db.appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await this.db.appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); } protected async calculateBlockHash(globals: GlobalVariables) { @@ -353,9 +351,9 @@ export class SoloBlockBuilder implements BlockBuilder { // Validate that the new roots we calculated from manual insertions match the outputs of the simulation protected async validateTrees(rollupOutput: BaseOrMergeRollupPublicInputs | RootRollupPublicInputs) { await Promise.all([ - this.validateTree(rollupOutput, MerkleTreeId.CONTRACT_TREE, 'Contract'), - this.validateTree(rollupOutput, MerkleTreeId.NOTE_HASH_TREE, 'NoteHash'), - 
this.validateTree(rollupOutput, MerkleTreeId.NULLIFIER_TREE, 'Nullifier'), + this.validateTree(rollupOutput, MerkleTreeId.CONTRACT_TREE, 'ContractTree'), + this.validateTree(rollupOutput, MerkleTreeId.NOTE_HASH_TREE, 'NoteHashTree'), + this.validateTree(rollupOutput, MerkleTreeId.NULLIFIER_TREE, 'NullifierTree'), this.validatePublicDataTreeRoot(rollupOutput), ]); } @@ -364,8 +362,8 @@ export class SoloBlockBuilder implements BlockBuilder { protected async validateRootOutput(rootOutput: RootRollupPublicInputs) { await Promise.all([ this.validateTrees(rootOutput), - this.validateTree(rootOutput, MerkleTreeId.BLOCKS_TREE, 'Blocks'), - this.validateTree(rootOutput, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, 'L1ToL2Messages'), + this.validateTree(rootOutput, MerkleTreeId.ARCHIVE, 'Archive'), + this.validateTree(rootOutput, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, 'L1ToL2MessagesTree'), ]); } @@ -395,7 +393,7 @@ export class SoloBlockBuilder implements BlockBuilder { } const localTree = await this.getTreeSnapshot(treeId); - const simulatedTree = (output as OutputWithTreeSnapshot)[`end${name}TreeSnapshot`]; + const simulatedTree = (output as OutputWithTreeSnapshot)[`end${name}Snapshot`]; this.validateSimulatedTree(localTree, simulatedTree, name); } @@ -403,7 +401,7 @@ export class SoloBlockBuilder implements BlockBuilder { protected validateSimulatedTree( localTree: AppendOnlyTreeSnapshot, simulatedTree: AppendOnlyTreeSnapshot, - name: 'NoteHash' | 'Contract' | 'Nullifier' | 'L1ToL2Messages' | 'Blocks', + name: TreeNames, label?: string, ) { if (!simulatedTree.root.toBuffer().equals(localTree.root.toBuffer())) { @@ -456,12 +454,12 @@ export class SoloBlockBuilder implements BlockBuilder { const startL1ToL2MessagesTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE); // Get blocks tree - const startBlocksTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.BLOCKS_TREE); - const newBlocksTreeSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.BLOCKS_TREE); + const startArchiveSnapshot = await this.getTreeSnapshot(MerkleTreeId.ARCHIVE); + const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE); - const newBlocksTreeSiblingPath = makeTuple( - BLOCKS_TREE_HEIGHT, - i => (i < newBlocksTreeSiblingPathArray.length ? newBlocksTreeSiblingPathArray[i] : Fr.ZERO), + const newArchiveSiblingPath = makeTuple( + ARCHIVE_HEIGHT, + i => (i < newArchiveSiblingPathArray.length ? 
newArchiveSiblingPathArray[i] : Fr.ZERO), 0, ); @@ -470,8 +468,8 @@ export class SoloBlockBuilder implements BlockBuilder { newL1ToL2Messages, newL1ToL2MessagesTreeRootSiblingPath, startL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot, - newBlocksTreeSiblingPath, + startArchiveSnapshot, + newArchiveSiblingPath, }); } @@ -540,7 +538,7 @@ export class SoloBlockBuilder implements BlockBuilder { l1ToL2MessagesTreeRoot, publicDataTreeRoot, ); - return this.getMembershipWitnessFor(blockHash, MerkleTreeId.BLOCKS_TREE, BLOCKS_TREE_HEIGHT); + return this.getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT); } protected async getConstantRollupData(globalVariables: GlobalVariables): Promise { @@ -549,7 +547,7 @@ export class SoloBlockBuilder implements BlockBuilder { mergeRollupVkHash: DELETE_FR, privateKernelVkTreeRoot: FUTURE_FR, publicKernelVkTreeRoot: FUTURE_FR, - startBlocksTreeSnapshot: await this.getTreeSnapshot(MerkleTreeId.BLOCKS_TREE), + archiveSnapshot: await this.getTreeSnapshot(MerkleTreeId.ARCHIVE), globalVariables, }); } @@ -566,19 +564,16 @@ export class SoloBlockBuilder implements BlockBuilder { const tree = MerkleTreeId.NULLIFIER_TREE; const prevValueIndex = await this.db.getPreviousValueIndex(tree, frToBigInt(nullifier)); - const prevValueInfo = await this.db.getLeafData(tree, prevValueIndex.index); - if (!prevValueInfo) { + if (!prevValueIndex) { throw new Error(`Nullifier tree should have one initial leaf`); } + const prevValuePreimage = (await this.db.getLeafPreimage(tree, prevValueIndex.index))!; + const prevValueSiblingPath = await this.db.getSiblingPath(tree, BigInt(prevValueIndex.index)); return { index: prevValueIndex, - leafPreimage: new NullifierLeafPreimage( - bigintToFr(prevValueInfo.value), - bigintToFr(prevValueInfo.nextValue), - bigintToNum(prevValueInfo.nextIndex), - ), + leafPreimage: prevValuePreimage, witness: new MembershipWitness( NULLIFIER_TREE_HEIGHT, BigInt(prevValueIndex.index), @@ -640,7 +635,7 @@ export class SoloBlockBuilder implements BlockBuilder { const startContractTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.CONTRACT_TREE); const startNoteHashTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE); const startPublicDataTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE); - const startBlocksTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.BLOCKS_TREE); + const startArchiveSnapshot = await this.getTreeSnapshot(MerkleTreeId.ARCHIVE); // Get the subtree sibling paths for the circuit const newCommitmentsSubtreeSiblingPathArray = await this.getSubtreeSiblingPath( @@ -725,7 +720,7 @@ export class SoloBlockBuilder implements BlockBuilder { startContractTreeSnapshot, startNoteHashTreeSnapshot, startPublicDataTreeRoot: startPublicDataTreeSnapshot.root, - startBlocksTreeSnapshot, + archiveSnapshot: startArchiveSnapshot, sortedNewNullifiers: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => Fr.fromBuffer(sortedNewNullifiers[i])), sortednewNullifiersIndexes: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => sortednewNullifiersIndexes[i]), newCommitmentsSubtreeSiblingPath, @@ -737,12 +732,8 @@ export class SoloBlockBuilder implements BlockBuilder { newPublicDataReadsSiblingPaths, lowNullifierLeafPreimages: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => i < nullifierWitnessLeaves.length - ? 
new NullifierLeafPreimage( - new Fr(nullifierWitnessLeaves[i].leafData.value), - new Fr(nullifierWitnessLeaves[i].leafData.nextValue), - Number(nullifierWitnessLeaves[i].leafData.nextIndex), - ) - : new NullifierLeafPreimage(Fr.ZERO, Fr.ZERO, 0), + ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage) + : NullifierLeafPreimage.empty(), ), lowNullifierMembershipWitness: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => i < lowNullifierMembershipWitnesses.length @@ -750,7 +741,7 @@ export class SoloBlockBuilder implements BlockBuilder { : this.makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), ), kernelData: [this.getKernelDataFor(left), this.getKernelDataFor(right)], - blocksTreeRootMembershipWitnesses: [ + archiveRootMembershipWitnesses: [ await this.getHistoricalTreesMembershipWitnessFor(left), await this.getHistoricalTreesMembershipWitnessFor(right), ], diff --git a/yarn-project/sequencer-client/src/block_builder/types.ts b/yarn-project/sequencer-client/src/block_builder/types.ts index b39f0a90a17..8a645ca8c56 100644 --- a/yarn-project/sequencer-client/src/block_builder/types.ts +++ b/yarn-project/sequencer-client/src/block_builder/types.ts @@ -1,16 +1,23 @@ import { AppendOnlyTreeSnapshot, BaseOrMergeRollupPublicInputs, RootRollupPublicInputs } from '@aztec/circuits.js'; +/** + * Type representing the names of the trees for the base rollup. + */ +type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree'; +/** + * Type representing the names of the trees. + */ +export type TreeNames = BaseTreeNames | 'L1ToL2MessagesTree' | 'Archive'; + /** * Type to assert that only the correct trees are checked when validating rollup tree outputs. */ export type AllowedTreeNames = - T extends RootRollupPublicInputs - ? 'NoteHash' | 'Contract' | 'Nullifier' | 'L1ToL2Messages' | 'Blocks' - : 'NoteHash' | 'Contract' | 'Nullifier'; + T extends RootRollupPublicInputs ? TreeNames : BaseTreeNames; /** * Type to assert the correct object field is indexed when validating rollup tree outputs. 
*/ export type OutputWithTreeSnapshot = { - [K in `end${AllowedTreeNames}TreeSnapshot`]: AppendOnlyTreeSnapshot; + [K in `end${AllowedTreeNames}Snapshot`]: AppendOnlyTreeSnapshot; }; diff --git a/yarn-project/sequencer-client/src/sequencer/utils.ts b/yarn-project/sequencer-client/src/sequencer/utils.ts index 62cde96601b..d315146837f 100644 --- a/yarn-project/sequencer-client/src/sequencer/utils.ts +++ b/yarn-project/sequencer-client/src/sequencer/utils.ts @@ -17,7 +17,7 @@ export async function getBlockHeader( Fr.fromBuffer(roots.nullifierTreeRoot), Fr.fromBuffer(roots.contractDataTreeRoot), Fr.fromBuffer(roots.l1Tol2MessagesTreeRoot), - Fr.fromBuffer(roots.blocksTreeRoot), + Fr.fromBuffer(roots.archiveRoot), Fr.ZERO, Fr.fromBuffer(roots.publicDataTreeRoot), prevGlobalsHash, diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 17e999a421f..5c472ea84dc 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -5,7 +5,10 @@ "exports": { ".": "./dest/index.js", "./stats": "./dest/stats/index.js", - "./jest": "./dest/jest/index.js" + "./jest": "./dest/jest/index.js", + "./interfaces": "./dest/interfaces/index.js", + "./log_id": "./dest/logs/log_id.js", + "./tx_hash": "./dest/tx/tx_hash.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/types/src/index.ts b/yarn-project/types/src/index.ts index 3ffc8125103..149ec5a7a6a 100644 --- a/yarn-project/types/src/index.ts +++ b/yarn-project/types/src/index.ts @@ -23,5 +23,4 @@ export * from './interfaces/index.js'; export * from './sibling_path.js'; export * from './auth_witness.js'; export * from './aztec_node/rpc/index.js'; -export * from '@aztec/circuits.js/types'; -export { CompleteAddress } from '@aztec/circuits.js'; +export { CompleteAddress, PublicKey, PartialAddress, GrumpkinPrivateKey } from '@aztec/circuits.js'; diff --git a/yarn-project/types/src/interfaces/deployed-contract.ts b/yarn-project/types/src/interfaces/deployed-contract.ts index 1e5fd058197..784b162ef67 100644 --- a/yarn-project/types/src/interfaces/deployed-contract.ts +++ b/yarn-project/types/src/interfaces/deployed-contract.ts @@ -1,6 +1,6 @@ -import { EthAddress } from '@aztec/circuits.js'; +import { CompleteAddress } from '@aztec/circuits.js'; import { ContractArtifact } from '@aztec/foundation/abi'; -import { CompleteAddress } from '@aztec/types'; +import { EthAddress } from '@aztec/foundation/eth-address'; /** * Represents a deployed contract on the Aztec network. diff --git a/yarn-project/types/src/interfaces/index.ts b/yarn-project/types/src/interfaces/index.ts index e0cbc82677c..44ed98bbed4 100644 --- a/yarn-project/types/src/interfaces/index.ts +++ b/yarn-project/types/src/interfaces/index.ts @@ -6,5 +6,4 @@ export * from './deployed-contract.js'; export * from './node-info.js'; export * from './sync-status.js'; export * from './configs.js'; -export * from './leaf_data.js'; -export * from './nullifier_witness.js'; +export * from './nullifier_tree.js'; diff --git a/yarn-project/types/src/interfaces/leaf_data.ts b/yarn-project/types/src/interfaces/leaf_data.ts deleted file mode 100644 index 2edc8e09818..00000000000 --- a/yarn-project/types/src/interfaces/leaf_data.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** - * A leaf of a tree. - */ -export interface LeafData { - /** - * A value of the leaf. - */ - value: bigint; - /** - * An index of the next leaf. - */ - nextIndex: bigint; - /** - * A value of the next leaf. 
- */ - nextValue: bigint; -} diff --git a/yarn-project/types/src/interfaces/nullifier_witness.ts b/yarn-project/types/src/interfaces/nullifier_tree.ts similarity index 79% rename from yarn-project/types/src/interfaces/nullifier_witness.ts rename to yarn-project/types/src/interfaces/nullifier_tree.ts index 90dc6d9a1c7..14fdf426b8d 100644 --- a/yarn-project/types/src/interfaces/nullifier_witness.ts +++ b/yarn-project/types/src/interfaces/nullifier_tree.ts @@ -1,7 +1,6 @@ -import { Fr, NULLIFIER_TREE_HEIGHT } from '@aztec/circuits.js'; +import { Fr, NULLIFIER_TREE_HEIGHT, NullifierLeafPreimage } from '@aztec/circuits.js'; import { SiblingPath } from '../sibling_path.js'; -import { LeafData } from './leaf_data.js'; /** * Nullifier membership witness. @@ -18,7 +17,7 @@ export class NullifierMembershipWitness { /** * Preimage of the nullifier. */ - public readonly leafData: LeafData, + public readonly leafPreimage: NullifierLeafPreimage, /** * Sibling path to prove membership of the nullifier. */ @@ -32,9 +31,9 @@ export class NullifierMembershipWitness { public toFieldArray(): Fr[] { return [ new Fr(this.index), - new Fr(this.leafData.value), - new Fr(this.leafData.nextIndex), - new Fr(this.leafData.nextValue), + new Fr(this.leafPreimage.nullifier), + new Fr(this.leafPreimage.nextIndex), + new Fr(this.leafPreimage.nextNullifier), ...this.siblingPath.toFieldArray(), ]; } diff --git a/yarn-project/types/src/interfaces/pxe.ts b/yarn-project/types/src/interfaces/pxe.ts index 1a2e52c1262..a78cf957bbd 100644 --- a/yarn-project/types/src/interfaces/pxe.ts +++ b/yarn-project/types/src/interfaces/pxe.ts @@ -1,7 +1,6 @@ -import { AztecAddress, Fr, GrumpkinPrivateKey, PartialAddress } from '@aztec/circuits.js'; +import { AztecAddress, CompleteAddress, Fr, GrumpkinPrivateKey, PartialAddress } from '@aztec/circuits.js'; import { AuthWitness, - CompleteAddress, ContractData, ExtendedContractData, ExtendedNote, diff --git a/yarn-project/types/src/interfaces/state_info_provider.ts b/yarn-project/types/src/interfaces/state_info_provider.ts index cec3f6fed55..7818f984dab 100644 --- a/yarn-project/types/src/interfaces/state_info_provider.ts +++ b/yarn-project/types/src/interfaces/state_info_provider.ts @@ -1,5 +1,5 @@ import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, CONTRACT_TREE_HEIGHT, Fr, L1_TO_L2_MSG_TREE_HEIGHT, @@ -12,7 +12,7 @@ import { L1ToL2MessageAndIndex } from '../l1_to_l2_message.js'; import { L2Block } from '../l2_block.js'; import { MerkleTreeId } from '../merkle_tree_id.js'; import { SiblingPath } from '../sibling_path.js'; -import { NullifierMembershipWitness } from './nullifier_witness.js'; +import { NullifierMembershipWitness } from './nullifier_tree.js'; /** Helper type for a specific L2 block number or the latest block number */ type BlockNumber = number | 'latest'; @@ -93,10 +93,7 @@ export interface StateInfoProvider { * @returns The sibling path. * TODO: https://github.com/AztecProtocol/aztec-packages/issues/3414 */ - getBlocksTreeSiblingPath( - blockNumber: BlockNumber, - leafIndex: bigint, - ): Promise>; + getArchiveSiblingPath(blockNumber: BlockNumber, leafIndex: bigint): Promise>; /** * Returns a sibling path for a leaf in the committed public data tree. 
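The hunks above consistently rename the historical blocks tree to the "archive": `MerkleTreeId.BLOCKS_TREE` becomes `MerkleTreeId.ARCHIVE`, `BLOCKS_TREE_HEIGHT` becomes `ARCHIVE_HEIGHT`, and `getBlocksTreeSiblingPath` becomes `getArchiveSiblingPath`. As a rough illustration of how a client could consume the renamed `StateInfoProvider` method, here is a minimal sketch of an archive membership check; the `hashPair` helper and the `ArchiveReader` interface are illustrative assumptions, not the protocol's actual hash or API surface:

```typescript
import { Fr } from '@aztec/foundation/fields';

// Assumption: a pairwise hash standing in for the protocol's actual tree hash.
declare function hashPair(left: Fr, right: Fr): Fr;

/** Minimal view of the renamed sibling-path method used below (an assumption). */
interface ArchiveReader {
  getArchiveSiblingPath(blockNumber: number | 'latest', leafIndex: bigint): Promise<{ toFieldArray(): Fr[] }>;
}

/** Recomputes the archive root from a block-hash leaf and compares it to an expected root. */
async function isBlockInArchive(
  reader: ArchiveReader,
  expectedRoot: Fr,
  blockHash: Fr,
  leafIndex: bigint,
): Promise<boolean> {
  const siblings = (await reader.getArchiveSiblingPath('latest', leafIndex)).toFieldArray();
  let node = blockHash;
  let index = leafIndex;
  for (const sibling of siblings) {
    // One bit of the leaf index per level decides whether the node is the left or right child.
    node = index % 2n === 0n ? hashPair(node, sibling) : hashPair(sibling, node);
    index /= 2n;
  }
  return node.toBuffer().equals(expectedRoot.toBuffer());
}
```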
diff --git a/yarn-project/types/src/l2_block.ts b/yarn-project/types/src/l2_block.ts index 67832ef2da7..2918ba99bd3 100644 --- a/yarn-project/types/src/l2_block.ts +++ b/yarn-project/types/src/l2_block.ts @@ -86,9 +86,9 @@ export class L2Block { */ public startL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot, /** - * The tree snapshot of the blocks tree at the start of the rollup. + * The tree snapshot of the archive at the start of the rollup. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot = AppendOnlyTreeSnapshot.empty(), + public startArchiveSnapshot: AppendOnlyTreeSnapshot = AppendOnlyTreeSnapshot.empty(), /** * The tree snapshot of the note hash tree at the end of the rollup. */ @@ -110,9 +110,9 @@ export class L2Block { */ public endL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot, /** - * The tree snapshot of the blocks tree at the end of the rollup. + * The tree snapshot of the archive at the end of the rollup. */ - public endBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public endArchiveSnapshot: AppendOnlyTreeSnapshot, /** * The commitments to be inserted into the note hash tree. */ @@ -216,13 +216,13 @@ export class L2Block { startContractTreeSnapshot: makeAppendOnlyTreeSnapshot(0), startPublicDataTreeRoot: Fr.random(), startL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - startBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + startArchiveSnapshot: makeAppendOnlyTreeSnapshot(0), endNoteHashTreeSnapshot: makeAppendOnlyTreeSnapshot(newCommitments.length), endNullifierTreeSnapshot: makeAppendOnlyTreeSnapshot(newNullifiers.length), endContractTreeSnapshot: makeAppendOnlyTreeSnapshot(newContracts.length), endPublicDataTreeRoot: Fr.random(), endL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(1), - endBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(1), + endArchiveSnapshot: makeAppendOnlyTreeSnapshot(1), newCommitments, newNullifiers, newContracts, @@ -277,9 +277,9 @@ export class L2Block { */ startL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot; /** - * The tree snapshot of the blocks tree at the start of the rollup. + * The tree snapshot of the archive at the start of the rollup. */ - startBlocksTreeSnapshot: AppendOnlyTreeSnapshot; + startArchiveSnapshot: AppendOnlyTreeSnapshot; /** * The tree snapshot of the note hash tree at the end of the rollup. */ @@ -301,9 +301,9 @@ export class L2Block { */ endL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot; /** - * The tree snapshot of the blocks tree at the end of the rollup. + * The tree snapshot of the archive at the end of the rollup. */ - endBlocksTreeSnapshot: AppendOnlyTreeSnapshot; + endArchiveSnapshot: AppendOnlyTreeSnapshot; /** * The commitments to be inserted into the note hash tree. 
*/ @@ -352,13 +352,13 @@ export class L2Block { fields.startContractTreeSnapshot, fields.startPublicDataTreeRoot, fields.startL1ToL2MessagesTreeSnapshot, - fields.startBlocksTreeSnapshot, + fields.startArchiveSnapshot, fields.endNoteHashTreeSnapshot, fields.endNullifierTreeSnapshot, fields.endContractTreeSnapshot, fields.endPublicDataTreeRoot, fields.endL1ToL2MessagesTreeSnapshot, - fields.endBlocksTreeSnapshot, + fields.endArchiveSnapshot, fields.newCommitments, fields.newNullifiers, fields.newPublicDataWrites, @@ -387,13 +387,13 @@ export class L2Block { this.startContractTreeSnapshot, this.startPublicDataTreeRoot, this.startL1ToL2MessagesTreeSnapshot, - this.startBlocksTreeSnapshot, + this.startArchiveSnapshot, this.endNoteHashTreeSnapshot, this.endNullifierTreeSnapshot, this.endContractTreeSnapshot, this.endPublicDataTreeRoot, this.endL1ToL2MessagesTreeSnapshot, - this.endBlocksTreeSnapshot, + this.endArchiveSnapshot, this.newCommitments.length, this.newCommitments, this.newNullifiers.length, @@ -449,13 +449,13 @@ export class L2Block { const startContractTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const startPublicDataTreeRoot = reader.readObject(Fr); const startL1ToL2MessagesTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const startBlocksTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); + const startArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endNoteHashTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endNullifierTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endContractTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endPublicDataTreeRoot = reader.readObject(Fr); const endL1ToL2MessagesTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endBlocksTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); + const endArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const newCommitments = reader.readVector(Fr); const newNullifiers = reader.readVector(Fr); const newPublicDataWrites = reader.readVector(PublicDataWrite); @@ -473,13 +473,13 @@ export class L2Block { startContractTreeSnapshot, startPublicDataTreeRoot, startL1ToL2MessagesTreeSnapshot: startL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot, + startArchiveSnapshot, endNoteHashTreeSnapshot, endNullifierTreeSnapshot, endContractTreeSnapshot, endPublicDataTreeRoot, endL1ToL2MessagesTreeSnapshot, - endBlocksTreeSnapshot, + endArchiveSnapshot, newCommitments, newNullifiers, newPublicDataWrites, @@ -589,13 +589,13 @@ export class L2Block { this.startContractTreeSnapshot, this.startPublicDataTreeRoot, this.startL1ToL2MessagesTreeSnapshot, - this.startBlocksTreeSnapshot, + this.startArchiveSnapshot, this.endNoteHashTreeSnapshot, this.endNullifierTreeSnapshot, this.endContractTreeSnapshot, this.endPublicDataTreeRoot, this.endL1ToL2MessagesTreeSnapshot, - this.endBlocksTreeSnapshot, + this.endArchiveSnapshot, this.getCalldataHash(), this.getL1ToL2MessagesHash(), ); @@ -615,7 +615,7 @@ export class L2Block { this.startContractTreeSnapshot, this.startPublicDataTreeRoot, this.startL1ToL2MessagesTreeSnapshot, - this.startBlocksTreeSnapshot, + this.startArchiveSnapshot, ); return sha256(inputValue); } @@ -632,7 +632,7 @@ export class L2Block { this.endContractTreeSnapshot, this.endPublicDataTreeRoot, this.endL1ToL2MessagesTreeSnapshot, - this.endBlocksTreeSnapshot, + this.endArchiveSnapshot, ); return sha256(inputValue); } @@ -843,14 +843,14 @@ export class L2Block { `startContractTreeSnapshot: 
${inspectTreeSnapshot(this.startContractTreeSnapshot)}`, `startPublicDataTreeRoot: ${this.startPublicDataTreeRoot.toString()}`, `startL1ToL2MessagesTreeSnapshot: ${inspectTreeSnapshot(this.startL1ToL2MessagesTreeSnapshot)}`, - `startBlocksTreeSnapshot: ${inspectTreeSnapshot(this.startBlocksTreeSnapshot)}`, + `startArchiveSnapshot: ${inspectTreeSnapshot(this.startArchiveSnapshot)}`, `endNoteHashTreeSnapshot: ${inspectTreeSnapshot(this.endNoteHashTreeSnapshot)}`, `endNullifierTreeSnapshot: ${inspectTreeSnapshot(this.endNullifierTreeSnapshot)}`, `endContractTreeSnapshot: ${inspectTreeSnapshot(this.endContractTreeSnapshot)}`, `endPublicDataTreeRoot: ${this.endPublicDataTreeRoot.toString()}`, `endPublicDataTreeRoot: ${this.endPublicDataTreeRoot.toString()}`, `endL1ToL2MessagesTreeSnapshot: ${inspectTreeSnapshot(this.endL1ToL2MessagesTreeSnapshot)}`, - `endBlocksTreeSnapshot: ${inspectTreeSnapshot(this.endBlocksTreeSnapshot)}`, + `endArchiveSnapshot: ${inspectTreeSnapshot(this.endArchiveSnapshot)}`, `newCommitments: ${inspectFrArray(this.newCommitments)}`, `newNullifiers: ${inspectFrArray(this.newNullifiers)}`, `newPublicDataWrite: ${inspectPublicDataWriteArray(this.newPublicDataWrites)}`, diff --git a/yarn-project/types/src/merkle_tree_id.ts b/yarn-project/types/src/merkle_tree_id.ts index 55f63259227..fde02472627 100644 --- a/yarn-project/types/src/merkle_tree_id.ts +++ b/yarn-project/types/src/merkle_tree_id.ts @@ -7,7 +7,7 @@ export enum MerkleTreeId { NOTE_HASH_TREE = 2, PUBLIC_DATA_TREE = 3, L1_TO_L2_MESSAGES_TREE = 4, - BLOCKS_TREE = 5, + ARCHIVE = 5, } export const merkleTreeIds = () => { diff --git a/yarn-project/types/src/tx/tx_hash.ts b/yarn-project/types/src/tx/tx_hash.ts index 138aa6dbe0f..00003456ee3 100644 --- a/yarn-project/types/src/tx/tx_hash.ts +++ b/yarn-project/types/src/tx/tx_hash.ts @@ -1,4 +1,3 @@ -import { assertMemberLength } from '@aztec/circuits.js'; import { deserializeBigInt, serializeBigInt } from '@aztec/foundation/serialize'; /** @@ -21,7 +20,9 @@ export class TxHash { */ public buffer: Buffer, ) { - assertMemberLength(this, 'buffer', TxHash.SIZE); + if (buffer.length !== TxHash.SIZE) { + throw new Error(`Expected buffer to have length ${TxHash.SIZE} but was ${buffer.length}`); + } } /** diff --git a/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts b/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts index 29ba293736d..9f3f6976936 100644 --- a/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts +++ b/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts @@ -1,6 +1,8 @@ +import { NullifierLeafPreimage } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { BatchInsertionResult } from '@aztec/merkle-tree'; -import { L2Block, LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { L2Block, MerkleTreeId, SiblingPath } from '@aztec/types'; import { CurrentTreeRoots, HandleL2BlockResult, MerkleTreeDb, MerkleTreeOperations, TreeInfo } from '../index.js'; @@ -59,16 +61,19 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { getPreviousValueIndex( treeId: MerkleTreeId.NULLIFIER_TREE, value: bigint, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. 
- */ - alreadyPresent: boolean; - }> { + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { return this.trees.getPreviousValueIndex(treeId, value, this.includeUncommitted); } @@ -79,7 +84,7 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { * @param index - The index to insert into. * @returns Empty promise. */ - updateLeaf(treeId: MerkleTreeId.NULLIFIER_TREE, leaf: LeafData, index: bigint): Promise { + updateLeaf(treeId: MerkleTreeId.NULLIFIER_TREE, leaf: NullifierLeafPreimage, index: bigint): Promise { return this.trees.updateLeaf(treeId, leaf, index); } @@ -87,10 +92,14 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { * Gets the leaf data at a given index and tree. * @param treeId - The ID of the tree get the leaf from. * @param index - The index of the leaf to get. - * @returns Leaf data. + * @returns Leaf preimage. */ - getLeafData(treeId: MerkleTreeId.NULLIFIER_TREE, index: number): Promise { - return this.trees.getLeafData(treeId, index, this.includeUncommitted); + async getLeafPreimage( + treeId: MerkleTreeId.NULLIFIER_TREE, + index: bigint, + ): Promise { + const preimage = await this.trees.getLeafPreimage(treeId, index, this.includeUncommitted); + return preimage as IndexedTreeLeafPreimage | undefined; } /** @@ -115,13 +124,12 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { } /** - * Inserts into the roots trees (CONTRACT_TREE_ROOTS_TREE, NOTE_HASH_TREE_ROOTS_TREE) - * the current roots of the corresponding trees (CONTRACT_TREE, NOTE_HASH_TREE). - * @param globalVariablesHash - The hash of the current global variables to include in the block hash. - * @returns Empty promise. + * Inserts the new block hash into the archive. + * This includes all of the current roots of all of the data trees and the current blocks global vars. + * @param globalVariablesHash - The global variables hash to insert into the block hash. 
*/ - public updateBlocksTree(globalVariablesHash: Fr): Promise { - return this.trees.updateBlocksTree(globalVariablesHash, this.includeUncommitted); + public updateArchive(globalVariablesHash: Fr): Promise { + return this.trees.updateArchive(globalVariablesHash, this.includeUncommitted); } /** diff --git a/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts b/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts index 93c94d19163..1fd883b98b5 100644 --- a/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts +++ b/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts @@ -1,6 +1,7 @@ import { Fr } from '@aztec/circuits.js'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { BatchInsertionResult, IndexedTreeSnapshot, TreeSnapshot } from '@aztec/merkle-tree'; -import { LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { MerkleTreeId, SiblingPath } from '@aztec/types'; import { CurrentTreeRoots, HandleL2BlockResult, MerkleTreeDb, MerkleTreeOperations, TreeInfo } from '../index.js'; @@ -28,23 +29,19 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations async findLeafIndex(treeId: MerkleTreeId, value: Buffer): Promise { const tree = await this.#getTreeSnapshot(treeId); - const numLeaves = tree.getNumLeaves(); - for (let i = 0n; i < numLeaves; i++) { - const currentValue = await tree.getLeafValue(i); - if (currentValue && currentValue.equals(value)) { - return i; - } - } - return undefined; + return tree.findLeafIndex(value); } getLatestGlobalVariablesHash(): Promise { return Promise.reject(new Error('not implemented')); } - async getLeafData(treeId: MerkleTreeId.NULLIFIER_TREE, index: number): Promise { + async getLeafPreimage( + treeId: MerkleTreeId.NULLIFIER_TREE, + index: bigint, + ): Promise { const snapshot = (await this.#getTreeSnapshot(treeId)) as IndexedTreeSnapshot; - return snapshot.getLatestLeafDataCopy(BigInt(index)); + return snapshot.getLatestLeafPreimageCopy(BigInt(index)); } async getLeafValue(treeId: MerkleTreeId, index: bigint): Promise { @@ -55,16 +52,19 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations getPreviousValueIndex( _treeId: MerkleTreeId.NULLIFIER_TREE, _value: bigint, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }> { + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. 
+ */ + alreadyPresent: boolean; + } + | undefined + > { return Promise.reject(new Error('not implemented')); } @@ -90,11 +90,11 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations this.#getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), this.#getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), this.#getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE), - this.#getTreeSnapshot(MerkleTreeId.BLOCKS_TREE), + this.#getTreeSnapshot(MerkleTreeId.ARCHIVE), ]); return { - blocksTreeRoot: snapshots[MerkleTreeId.BLOCKS_TREE].getRoot(), + archiveRoot: snapshots[MerkleTreeId.ARCHIVE].getRoot(), contractDataTreeRoot: snapshots[MerkleTreeId.CONTRACT_TREE].getRoot(), l1Tol2MessagesTreeRoot: snapshots[MerkleTreeId.L1_TO_L2_MESSAGES_TREE].getRoot(), noteHashTreeRoot: snapshots[MerkleTreeId.NOTE_HASH_TREE].getRoot(), @@ -113,7 +113,7 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations return Promise.reject(new Error('Tree snapshot operations are read-only')); } - updateBlocksTree(): Promise { + updateArchive(): Promise { return Promise.reject(new Error('Tree snapshot operations are read-only')); } @@ -129,7 +129,7 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations return Promise.reject(new Error('Tree snapshot operations are read-only')); } - updateHistoricBlocksTree(): Promise { + updateHistoricArchive(): Promise { return Promise.reject(new Error('Tree snapshot operations are read-only')); } diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts index d5e3798a7c6..5e90a48f00f 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts @@ -72,13 +72,13 @@ const getMockBlock = (blockNumber: number, newContractsCommitments?: Buffer[]) = startContractTreeSnapshot: getMockTreeSnapshot(), startPublicDataTreeRoot: Fr.random(), startL1ToL2MessagesTreeSnapshot: getMockTreeSnapshot(), - startBlocksTreeSnapshot: getMockTreeSnapshot(), + startArchiveSnapshot: getMockTreeSnapshot(), endNoteHashTreeSnapshot: getMockTreeSnapshot(), endNullifierTreeSnapshot: getMockTreeSnapshot(), endContractTreeSnapshot: getMockTreeSnapshot(), endPublicDataTreeRoot: Fr.random(), endL1ToL2MessagesTreeSnapshot: getMockTreeSnapshot(), - endBlocksTreeSnapshot: getMockTreeSnapshot(), + endArchiveSnapshot: getMockTreeSnapshot(), newCommitments: times(MAX_NEW_COMMITMENTS_PER_TX, Fr.random), newNullifiers: times(MAX_NEW_NULLIFIERS_PER_TX, Fr.random), newContracts: newContractsCommitments?.map(x => Fr.fromBuffer(x)) ?? 
[Fr.random()], diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts index 13c6617513d..4ae2abd0bb0 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts @@ -1,8 +1,9 @@ -import { MAX_NEW_NULLIFIERS_PER_TX } from '@aztec/circuits.js'; +import { MAX_NEW_NULLIFIERS_PER_TX, NullifierLeafPreimage } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { BatchInsertionResult, IndexedTreeSnapshot, TreeSnapshot } from '@aztec/merkle-tree'; -import { L2Block, LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { L2Block, MerkleTreeId, SiblingPath } from '@aztec/types'; /** * Type alias for the nullifier tree ID. @@ -73,8 +74,8 @@ export type CurrentTreeRoots = { l1Tol2MessagesTreeRoot: Buffer; /** Nullifier data tree root. */ nullifierTreeRoot: Buffer; - /** Blocks tree root. */ - blocksTreeRoot: Buffer; + /** Archive root. */ + archiveRoot: Buffer; /** Public data tree root */ publicDataTreeRoot: Buffer; }; @@ -136,23 +137,26 @@ export interface MerkleTreeOperations { getPreviousValueIndex( treeId: IndexedTreeId, value: bigint, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }>; + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + >; /** * Returns the data at a specific leaf. * @param treeId - The tree for which leaf data should be returned. * @param index - The index of the leaf required. */ - getLeafData(treeId: IndexedTreeId, index: number): Promise; + getLeafPreimage(treeId: IndexedTreeId, index: bigint): Promise; /** * Update the leaf data at the given index. @@ -160,7 +164,7 @@ export interface MerkleTreeOperations { * @param leaf - The updated leaf value. * @param index - The index of the leaf to be updated. */ - updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: LeafData | Buffer, index: bigint): Promise; + updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: NullifierLeafPreimage | Buffer, index: bigint): Promise; /** * Returns the index containing a leaf value. @@ -177,11 +181,11 @@ export interface MerkleTreeOperations { getLeafValue(treeId: MerkleTreeId, index: bigint): Promise; /** - * Inserts the new block hash into the new block hashes tree. + * Inserts the new block hash into the archive. * This includes all of the current roots of all of the data trees and the current blocks global vars. * @param globalVariablesHash - The global variables hash to insert into the block hash. 
*/ - updateBlocksTree(globalVariablesHash: Fr): Promise; + updateArchive(globalVariablesHash: Fr): Promise; /** * Updates the latest global variables hash diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index 311c071d8b1..4ebcdb101c2 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -1,5 +1,5 @@ import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, CONTRACT_TREE_HEIGHT, Fr, GlobalVariables, @@ -7,12 +7,15 @@ import { NOTE_HASH_TREE_HEIGHT, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + NullifierLeaf, + NullifierLeafPreimage, PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/circuits.js'; import { computeBlockHash, computeGlobalsHash } from '@aztec/circuits.js/abis'; import { Committable } from '@aztec/foundation/committable'; import { SerialQueue } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { AppendOnlyTree, BatchInsertionResult, @@ -25,7 +28,7 @@ import { loadTree, newTree, } from '@aztec/merkle-tree'; -import { L2Block, LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { Hasher, L2Block, MerkleTreeId, SiblingPath } from '@aztec/types'; import { default as levelup } from 'levelup'; @@ -53,6 +56,15 @@ interface FromDbOptions { const LAST_GLOBAL_VARS_HASH = 'lastGlobalVarsHash'; +/** + * The nullifier tree is an indexed tree. + */ +class NullifierTree extends StandardIndexedTree { + constructor(db: levelup.LevelUp, hasher: Hasher, name: string, depth: number, size: bigint = 0n, root?: Buffer) { + super(db, hasher, name, depth, size, NullifierLeafPreimage, NullifierLeaf, root); + } +} + /** * A convenience class for managing multiple merkle trees. */ @@ -82,7 +94,7 @@ export class MerkleTrees implements MerkleTreeDb { CONTRACT_TREE_HEIGHT, ); const nullifierTree = await initializeTree( - StandardIndexedTree, + NullifierTree, this.db, hasher, `${MerkleTreeId[MerkleTreeId.NULLIFIER_TREE]}`, @@ -110,14 +122,14 @@ export class MerkleTrees implements MerkleTreeDb { `${MerkleTreeId[MerkleTreeId.L1_TO_L2_MESSAGES_TREE]}`, L1_TO_L2_MSG_TREE_HEIGHT, ); - const blocksTree: AppendOnlyTree = await initializeTree( + const archive: AppendOnlyTree = await initializeTree( StandardTree, this.db, hasher, - `${MerkleTreeId[MerkleTreeId.BLOCKS_TREE]}`, - BLOCKS_TREE_HEIGHT, + `${MerkleTreeId[MerkleTreeId.ARCHIVE]}`, + ARCHIVE_HEIGHT, ); - this.trees = [contractTree, nullifierTree, noteHashTree, publicDataTree, l1Tol2MessagesTree, blocksTree]; + this.trees = [contractTree, nullifierTree, noteHashTree, publicDataTree, l1Tol2MessagesTree, archive]; this.jobQueue.start(); @@ -125,7 +137,7 @@ export class MerkleTrees implements MerkleTreeDb { if (!fromDb) { const initialGlobalVariablesHash = computeGlobalsHash(GlobalVariables.empty()); await this._updateLatestGlobalVariablesHash(initialGlobalVariablesHash); - await this._updateBlocksTree(initialGlobalVariablesHash, true); + await this._updateArchive(initialGlobalVariablesHash, true); await this._commit(); } else { await this._updateLatestGlobalVariablesHash(fromDbOptions.globalVariablesHash); @@ -177,8 +189,8 @@ export class MerkleTrees implements MerkleTreeDb { * @param globalsHash - The current global variables hash. * @param includeUncommitted - Indicates whether to include uncommitted data. 
*/ - public async updateBlocksTree(globalsHash: Fr, includeUncommitted: boolean) { - await this.synchronize(() => this._updateBlocksTree(globalsHash, includeUncommitted)); + public async updateArchive(globalsHash: Fr, includeUncommitted: boolean) { + await this.synchronize(() => this._updateArchive(globalsHash, includeUncommitted)); } /** @@ -221,7 +233,7 @@ export class MerkleTrees implements MerkleTreeDb { contractDataTreeRoot: roots[2], l1Tol2MessagesTreeRoot: roots[3], publicDataTreeRoot: roots[4], - blocksTreeRoot: roots[5], + archiveRoot: roots[5], }; } @@ -237,7 +249,7 @@ export class MerkleTrees implements MerkleTreeDb { MerkleTreeId.CONTRACT_TREE, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, MerkleTreeId.PUBLIC_DATA_TREE, - MerkleTreeId.BLOCKS_TREE, + MerkleTreeId.ARCHIVE, ].map(tree => this.trees[tree].getRoot(includeUncommitted)); return Promise.resolve(roots); @@ -310,19 +322,20 @@ export class MerkleTrees implements MerkleTreeDb { treeId: IndexedTreeId, value: bigint, includeUncommitted: boolean, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }> { - return await this.synchronize(() => - Promise.resolve(this._getIndexedTree(treeId).findIndexOfPreviousValue(value, includeUncommitted)), - ); + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { + return await this.synchronize(() => this._getIndexedTree(treeId).findIndexOfPreviousKey(value, includeUncommitted)); } /** @@ -330,15 +343,15 @@ export class MerkleTrees implements MerkleTreeDb { * @param treeId - The ID of the tree get the leaf from. * @param index - The index of the leaf to get. * @param includeUncommitted - Indicates whether to include uncommitted data. - * @returns Leaf data. + * @returns Leaf preimage. */ - public async getLeafData( + public async getLeafPreimage( treeId: IndexedTreeId, - index: number, + index: bigint, includeUncommitted: boolean, - ): Promise { + ): Promise { return await this.synchronize(() => - Promise.resolve(this._getIndexedTree(treeId).getLatestLeafDataCopy(index, includeUncommitted)), + this._getIndexedTree(treeId).getLatestLeafPreimageCopy(index, includeUncommitted), ); } @@ -356,13 +369,7 @@ export class MerkleTrees implements MerkleTreeDb { ): Promise { return await this.synchronize(async () => { const tree = this.trees[treeId]; - for (let i = 0n; i < tree.getNumLeaves(includeUncommitted); i++) { - const currentValue = await tree.getLeafValue(i, includeUncommitted); - if (currentValue && currentValue.equals(value)) { - return i; - } - } - return undefined; + return await tree.findLeafIndex(value, includeUncommitted); }); } @@ -373,7 +380,7 @@ export class MerkleTrees implements MerkleTreeDb { * @param index - The index to insert into. * @returns Empty promise. 
*/ - public async updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: LeafData | Buffer, index: bigint): Promise { + public async updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: Buffer, index: bigint): Promise { return await this.synchronize(() => this._updateLeaf(treeId, leaf, index)); } @@ -427,9 +434,9 @@ export class MerkleTrees implements MerkleTreeDb { return Promise.resolve(this.latestGlobalVariablesHash.get(includeUncommitted)); } - private async _updateBlocksTree(globalsHash: Fr, includeUncommitted: boolean) { + private async _updateArchive(globalsHash: Fr, includeUncommitted: boolean) { const blockHash = await this._getCurrentBlockHash(globalsHash, includeUncommitted); - await this._appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await this._appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); } /** @@ -486,11 +493,7 @@ export class MerkleTrees implements MerkleTreeDb { return await tree.appendLeaves(leaves); } - private async _updateLeaf( - treeId: IndexedTreeId | PublicTreeId, - leaf: LeafData | Buffer, - index: bigint, - ): Promise { + private async _updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: Buffer, index: bigint): Promise { const tree = this.trees[treeId]; if (!('updateLeaf' in tree)) { throw new Error('Tree does not support `updateLeaf` method'); @@ -542,7 +545,7 @@ export class MerkleTrees implements MerkleTreeDb { [l2Block.endNoteHashTreeSnapshot.root, MerkleTreeId.NOTE_HASH_TREE], [l2Block.endPublicDataTreeRoot, MerkleTreeId.PUBLIC_DATA_TREE], [l2Block.endL1ToL2MessagesTreeSnapshot.root, MerkleTreeId.L1_TO_L2_MESSAGES_TREE], - [l2Block.endBlocksTreeSnapshot.root, MerkleTreeId.BLOCKS_TREE], + [l2Block.endArchiveSnapshot.root, MerkleTreeId.ARCHIVE], ] as const; const compareRoot = (root: Fr, treeId: MerkleTreeId) => { const treeRoot = this.trees[treeId].getRoot(true); @@ -589,7 +592,7 @@ export class MerkleTrees implements MerkleTreeDb { this.log(`Synced global variables with hash ${globalVariablesHash}`); const blockHash = await this._getCurrentBlockHash(globalVariablesHash, true); - await this._appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await this._appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); await this._commit(); } diff --git a/yarn-project/yarn-project-base/Dockerfile b/yarn-project/yarn-project-base/Dockerfile index a6ed174d487..96be5e824c3 100644 --- a/yarn-project/yarn-project-base/Dockerfile +++ b/yarn-project/yarn-project-base/Dockerfile @@ -47,13 +47,13 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/bb.js as bb.js FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir as noir FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-packages as noir-packages -FROM node:18-alpine -RUN apk update && apk add --no-cache bash jq curl +FROM node:18.19.0 +RUN apt update && apt install -y jq curl perl && rm -rf /var/lib/apt/lists/* && apt-get clean # Copy L1 contracts. COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts # Copy in bb.js -COPY --from=bb.js /usr/src/barretenberg/ts/package /usr/src/barretenberg/ts/package +COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts # Copy in nargo COPY --from=noir /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo # Copy in noir packages @@ -75,15 +75,19 @@ WORKDIR /usr/src/yarn-project # The dockerignore file ensures the context only contains package.json and tsconfig.json files. COPY . . +# List all included files and hash for debugging. +RUN echo "Context files: " && find . 
-type f | sort && \ + echo -n "Context hash: " && find . -type f -print0 | sort -z | xargs -0 sha256sum | sha256sum + # Install packages and rebuild the global cache with hard links. # TODO: Puppeteer is adding ~300MB to this image due to chrome download (part of e2e). # Switch to using puppeteer-core then it won't download chrome. For now just erase. RUN yarn --immutable && rm -rf /root/.cache/puppeteer && /bin/bash -c '\ - rm -rf /root/.yarn/berry/cache/* && \ - cd .yarn/cache && \ - for F in *; do \ - [[ $F =~ (.*-) ]] && ln $F /root/.yarn/berry/cache/${BASH_REMATCH[1]}8.zip; \ - done' + rm -rf /root/.yarn/berry/cache/* && \ + cd .yarn/cache && \ + for F in *; do \ + [[ $F =~ (.*-) ]] && ln $F /root/.yarn/berry/cache/${BASH_REMATCH[1]}8.zip; \ + done' # If everything's worked properly, we should no longer need access to the network. RUN echo "enableNetwork: false" >> .yarnrc.yml diff --git a/yarn-project/yarn-project-base/Dockerfile.dockerignore b/yarn-project/yarn-project-base/Dockerfile.dockerignore index a6ba1856c45..257a2d74457 100644 --- a/yarn-project/yarn-project-base/Dockerfile.dockerignore +++ b/yarn-project/yarn-project-base/Dockerfile.dockerignore @@ -6,7 +6,9 @@ .* README.md bootstrap.sh -Dockerfile +Dockerfile* +*.tsbuildinfo +node_modules # This is a sticking point, due to the project being under it's own dir. # Need to unexclude the dir and then exclude it's files. @@ -17,10 +19,12 @@ Dockerfile !boxes/blank !boxes/blank-react boxes/*/* +!boxes/*/package.json +!boxes/*/tsconfig.json # Unexclude package.json and yarn.lock files, for detecting any dependency changes. -!**/package.json -!**/package.*.json +!*/package.json +!*/package.*.json !yarn.lock # Unexclude parts of yarn related config as this also affects how dependencies are installed. @@ -30,7 +34,7 @@ boxes/*/* !.yarn/patches # Unexclude tsconfig files for running project reference checks. -!**/tsconfig.json +!*/tsconfig.json # Unexclude scripts we use in the Dockerfile. !yarn-project-base/scripts diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 9673d77e245..e547b730aa1 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -277,9 +277,9 @@ __metadata: languageName: unknown linkType: soft -"@aztec/bb.js@portal:../barretenberg/ts/package::locator=%40aztec%2Faztec3-packages%40workspace%3A.": +"@aztec/bb.js@portal:../barretenberg/ts::locator=%40aztec%2Faztec3-packages%40workspace%3A.": version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../barretenberg/ts/package::locator=%40aztec%2Faztec3-packages%40workspace%3A." + resolution: "@aztec/bb.js@portal:../barretenberg/ts::locator=%40aztec%2Faztec3-packages%40workspace%3A." 
dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -547,6 +547,7 @@ __metadata: version: 0.0.0-use.local resolution: "@aztec/merkle-tree@workspace:merkle-tree" dependencies: + "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 @@ -7332,9 +7333,6 @@ __metadata: dependencies: "@aztec/aztec-ui": ^0.1.14 "@aztec/aztec.js": "workspace:^" - "@aztec/circuits.js": "workspace:^" - "@aztec/foundation": "workspace:^" - "@aztec/types": "workspace:^" "@types/jest": ^29.5.0 "@types/mocha": ^10.0.3 "@types/node": ^20.5.9 @@ -7383,8 +7381,6 @@ __metadata: dependencies: "@aztec/aztec-ui": ^0.1.14 "@aztec/aztec.js": "workspace:^" - "@aztec/circuits.js": "workspace:^" - "@aztec/foundation": "workspace:^" "@types/jest": ^29.5.0 "@types/mocha": ^10.0.3 "@typescript-eslint/eslint-plugin": ^6.0.0 @@ -19099,9 +19095,6 @@ __metadata: dependencies: "@aztec/aztec-ui": ^0.1.14 "@aztec/aztec.js": "workspace:^" - "@aztec/circuits.js": "workspace:^" - "@aztec/foundation": "workspace:^" - "@aztec/types": "workspace:^" "@jest/globals": ^29.6.4 "@types/jest": ^29.5.0 "@types/mocha": ^10.0.3 diff --git a/yellow-paper/Dockerfile b/yellow-paper/Dockerfile index 1d9939128e9..ab8cb91196d 100644 --- a/yellow-paper/Dockerfile +++ b/yellow-paper/Dockerfile @@ -1,4 +1,4 @@ -FROM node:18-alpine +FROM node:18.19.0-alpine WORKDIR /usr/src COPY . . RUN yarn && yarn build --no-minify \ No newline at end of file diff --git a/yellow-paper/docs/calls/public_private_messaging.md b/yellow-paper/docs/calls/public_private_messaging.md index c418429f2e4..241c796df9f 100644 --- a/yellow-paper/docs/calls/public_private_messaging.md +++ b/yellow-paper/docs/calls/public_private_messaging.md @@ -10,33 +10,55 @@ sidebar_position: 5 This is a draft. These requirements need to be considered by the wider team, and might change significantly before a mainnet release. ::: -Private functions work by providing evidence of correct execution generated locally through kernel proofs. Public functions, on the other hand, are able to utilize the latest state to manage updates and perform alterations. As such, public state and private state are in different trees. In a private function you cannot reference or modify public state and vice versa. - +Public state and private state exist in different trees. In a private function you cannot reference or modify public state. Yet, it should be possible for: 1. private functions to call private or public functions 2. public functions to call private or public functions -For private execution, the user executed methods locally and presents evidence of correct execution as part of their transaction in the form of a kernel proof (generated locally on user device ahead of time). This way, the builder doesn't need to have knowledge of everything happening in the transaction, only the results. However, public functions are executed at the "tip" of the chain (i.e. make use of the latest updates), they can only be done by a builder who is aware of all the changes. Therefore a public function can't be executed locally by the user in the same way a private function is, as it would lead to race conditions, if the user doesn't keep track of the latest updates of the chain. If we were to build this public proof on the latest state, we would encounter problems. How can two different users build proofs at the same time, given that they will be executed one after the other by the sequencer? 
The simple answer is that they cannot, as race conditions would arise where one of the proofs would be invalidated by the other due to a change in the state root (which would nullify Merkle paths).
+Private functions are executed locally by the user, who provides evidence of correct execution in the form of kernel proofs. This way, the sequencer doesn't need to have knowledge of everything happening in the transaction, only the results. Public functions, on the other hand, are able to utilize the latest state to manage updates and perform alterations, as they are executed by the sequencer.

-As a result, private functions are always executed first, as they are executed on a state $S_i$, where $i \le n$, with $S_n$ representing the current state where the public functions always operate on the current state $S_n$.
+Therefore, private functions are always executed first: they are executed on a state $S_i$, where $i \le n$ and $S_n$ is the current state, while public functions always operate on the current state $S_n$. This enables private functions to enqueue calls to public functions, but the reverse is not true: since private functions execute first, they cannot "wait" on the results of any of their calls to public functions. Stated differently, any calls made across domains are unilateral in nature.

The diagrams below show the order of function calls first, followed by how the functions will be executed. Notably, the second private function call is independent of the output of the public function and merely occurs after its execution.

-![Public - Private Ordering](./images/calls/pvt_pub_ordering.png)
+Tx call order will be:
+```mermaid
+graph TD
+  A[Private Function 1] -->|Calls| B(Public Function 1)
+  A -----> |Followed by| C[Private Function 2]
+```

-## Private to Public Messaging
-If a private function in an Aztec smart contract wants to call a public function, it gets pushed into a separate public call stack that is enqueued. The private kernel circuit which must prove the execution of the private function(s), then hashes each of the item in the call stack and returns that. The private kernel proof, the public inputs of the private kernel (which contain the hash of the each of the public call stack item) and other transaction data (like enqueued public function calls, new commitments, nullifiers etc) get passed along to the sequencer. Sequencer then picks up the public call stack item and executes each of the functions. The Public VM which executes the methods then verifies that the hash provided by the private kernel matches the current call stack item.
+But Tx execution order will be:
+
+```mermaid
+graph TD
+  A[Private Function 1] -->|Followed by| B(Private Function 2)
+  A -----> |Followed by| C[Public Function 1]
+```

-This way, you can destroy your private state and create them in public within the same transaction or indirectly assert constraints on the execution of the private functions with latest data.
+## Private to Public Messaging
+When a private function calls a public function:
+1. Public function args get hashed together
+1. A public call stack item is created with the public function selector, its contract address, and args hash
+1. The hash of the item gets enqueued into a separate public call stack and passed as inputs to the private kernel
+1. The private kernel pushes these hashes into the public input, which the sequencer can see.
+1. PXE creates a transaction object as outlined [here](../transactions/tx-object.md), where it passes the hashes and the actual call stack item.
+1. PXE sends the transaction to the sequencer.
+1. The sequencer then picks up the public call stack item and executes each of the functions.
+1. The Public VM, which executes the methods, then verifies that the hash provided by the private kernel matches the current call stack item.
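+
+To make steps 1-4 above concrete, here is a minimal TypeScript sketch of how a private execution might enqueue a public call. This is illustrative only: `Fr`, `PublicCallStackItem`, `hash`, and `enqueuePublicCall` are hypothetical names rather than the actual aztec.js/circuits.js API, and the toy hash stands in for the protocol's circuit-friendly hash.
+
+```typescript
+// Hypothetical sketch -- not the real Aztec API.
+type Fr = bigint; // a field element
+
+interface PublicCallStackItem {
+  contractAddress: Fr;  // contract exposing the public function
+  functionSelector: Fr; // which public function to run
+  argsHash: Fr;         // hash of the public function's args (step 1)
+}
+
+// Toy stand-in for the protocol's circuit-friendly hash (e.g. Pedersen).
+const hash = (fields: Fr[]): Fr =>
+  fields.reduce((acc, f) => (acc * 31n + f) % 2n ** 254n, 0n);
+
+// Steps 2-4: build the item, hash it, and enqueue the hash so the
+// private kernel can expose it as a public input for the sequencer.
+function enqueuePublicCall(
+  publicCallStack: Fr[], // item hashes; become kernel public inputs
+  contractAddress: Fr,
+  functionSelector: Fr,
+  args: Fr[],
+): PublicCallStackItem {
+  const item = { contractAddress, functionSelector, argsHash: hash(args) };
+  publicCallStack.push(hash([item.contractAddress, item.functionSelector, item.argsHash]));
+  return item; // the preimage travels in the tx object for the sequencer
+}
+```
+
+Note the asymmetry this encodes: only the item hashes become kernel public inputs, while the preimages ride along in the transaction object so that the sequencer and Public VM can check them against those hashes.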

### Handling Privacy Leakage and `msg.sender`

In the above design, the sequencer only sees the public part of the call stack, along with any new commitments, nullifiers, etc. that were created in the private transaction, i.e., it should learn nothing more about the private transaction (such as its origin or execution logic).

-But what if the enqueued public function makes use of `msg_sender` which is meant to use
+:::warning
+TODO: Haven't finalized what msg.sender will be
+:::
+
+Within the context of these enqueued public functions, any usage of `msg_sender` should return **TODO**. If the `msg_sender` is the actual user, then it leaks privacy. If `msg_sender` is the contract address, this leaks which contract is calling the public method and therefore leaks which contract the user was interacting with in private land.

-Specifically, when the call stack is passed to the kernel circuit, the kernel should assert the `msg_sender` is 0 and hash appropriately. `msg_sender` could be the contract address too instead of `0`, but it leaks which contract is calling the public method and therefore leaks which contract the user was interacting with in private land.
+Therefore, when the call stack is passed to the kernel circuit, the kernel should assert the `msg_sender` is 0 and hash appropriately.

### Reverts
diff --git a/yellow-paper/docs/public-vm/Types.mdx b/yellow-paper/docs/public-vm/Types.mdx
deleted file mode 100644
index e69de29bb2d..00000000000
diff --git a/yellow-paper/docs/public-vm/alu.md b/yellow-paper/docs/public-vm/alu.md
index bd83dde6a23..d4c0066d936 100644
--- a/yellow-paper/docs/public-vm/alu.md
+++ b/yellow-paper/docs/public-vm/alu.md
@@ -6,7 +6,7 @@ This component of the VM circuit evaluates both base-2 arithmetic operations and
The following block diagram maps out a draft of the internal components of the "ALU"

-![](./gen/images/alu/alu.png)
+![](./images/alu.png)

Notes:
diff --git a/yellow-paper/docs/public-vm/control-flow.md b/yellow-paper/docs/public-vm/control-flow.md
index 707e78ecf8b..836697fa346 100644
--- a/yellow-paper/docs/public-vm/control-flow.md
+++ b/yellow-paper/docs/public-vm/control-flow.md
@@ -14,7 +14,7 @@ The intention is for sub-operations to be implementable as independent VM circui
# Control flow

-![](./gen/images/control-flow/avm-control-flow.png)
+![](./images/avm-control-flow.png)

> Notation note: whenever the VM "sends a signal" to one or more VM components, this is analogous to defining a boolean column in the execution trace that toggles on/off specific functionality
diff --git a/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx b/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx
index 12b44617024..2c1cc93dd04 100644
--- a/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx
+++ b/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx
@@ -1,7 +1,7 @@
[comment]: # (THIS IS A GENERATED FILE! DO NOT EDIT!)
[comment]: # (Generated via `yarn preprocess`) -[comment]: # (Generated by InstructionSetMarkdownGen.tsx and InstructionSet.js) +[comment]: # (Generated by genMarkdown.js, InstructionSet.js, InstructionSize.js) import Markdown from 'react-markdown' import CodeBlock from '@theme/CodeBlock' @@ -16,7 +16,7 @@ Click on an instruction name to jump to its section. 0x00 [`ADD`](#isa-section-add) Addition (a + b) - 96 + 128 { `M[dstOffset] = M[aOffset] + M[bOffset] mod 2^k` } @@ -24,7 +24,7 @@ Click on an instruction name to jump to its section. 0x01 [`SUB`](#isa-section-sub) Subtraction (a - b) - 96 + 128 { `M[dstOffset] = M[aOffset] - M[bOffset] mod 2^k` } @@ -32,7 +32,7 @@ Click on an instruction name to jump to its section. 0x02 [`DIV`](#isa-section-div) Unsigned division (a / b) - 96 + 128 { `M[dstOffset] = M[aOffset] / M[bOffset]` } @@ -40,7 +40,7 @@ Click on an instruction name to jump to its section. 0x03 [`EQ`](#isa-section-eq) Equality check (a == b) - 96 + 128 { `M[dstOffset] = M[aOffset] == M[bOffset] ? 1 : 0` } @@ -48,7 +48,7 @@ Click on an instruction name to jump to its section. 0x04 [`LT`](#isa-section-lt) Less-than check (a < b) - 96 + 128 { `M[dstOffset] = M[aOffset] < M[bOffset] ? 1 : 0` } @@ -56,7 +56,7 @@ Click on an instruction name to jump to its section. 0x05 [`LTE`](#isa-section-lte) Less-than-or-equals check (a <= b) - 96 + 128 { `M[dstOffset] = M[aOffset] <= M[bOffset] ? 1 : 0` } @@ -64,7 +64,7 @@ Click on an instruction name to jump to its section. 0x06 [`AND`](#isa-section-and) Bitwise AND (a & b) - 96 + 128 { `M[dstOffset] = M[aOffset] AND M[bOffset]` } @@ -72,7 +72,7 @@ Click on an instruction name to jump to its section. 0x07 [`OR`](#isa-section-or) Bitwise OR (a | b) - 96 + 128 { `M[dstOffset] = M[aOffset] OR M[bOffset]` } @@ -80,7 +80,7 @@ Click on an instruction name to jump to its section. 0x08 [`XOR`](#isa-section-xor) Bitwise XOR (a ^ b) - 96 + 128 { `M[dstOffset] = M[aOffset] XOR M[bOffset]` } @@ -88,7 +88,7 @@ Click on an instruction name to jump to its section. 0x09 [`NOT`](#isa-section-not) Bitwise NOT (inversion) - 72 + 96 { `M[dstOffset] = NOT M[aOffset]` } @@ -96,7 +96,7 @@ Click on an instruction name to jump to its section. 0x0a [`SHL`](#isa-section-shl) Bitwise leftward shift (a << b) - 96 + 128 { `M[dstOffset] = M[aOffset] << M[bOffset]` } @@ -104,7 +104,7 @@ Click on an instruction name to jump to its section. 0x0b [`SHR`](#isa-section-shr) Bitwise rightward shift (a >> b) - 96 + 128 { `M[dstOffset] = M[aOffset] >> M[bOffset]` } @@ -112,15 +112,15 @@ Click on an instruction name to jump to its section. 0x0c [`CAST`](#isa-section-cast) Type cast - 72 + 96 { - `M[dstOffset] = cast(M[aOffset])` + `M[dstOffset] = cast(M[aOffset])` } 0x0d [`SET`](#isa-section-set) Set a memory word from a constant in the bytecode. - 48+N + 64+N { `M[dstOffset] = const` } @@ -128,7 +128,7 @@ Click on an instruction name to jump to its section. 0x0e [`MOV`](#isa-section-mov) Move a word from source memory location to destination`. - 64 + 88 { `M[dstOffset] = M[srcOffset]` } @@ -136,7 +136,7 @@ Click on an instruction name to jump to its section. 0x0f [`CMOV`](#isa-section-cmov) Move a word (conditionally chosen) from one memory location to another (`d = cond > 0 ? a : b`). - 112 + 152 { `M[dstOffset] = M[condOffset] > 0 ? M[aOffset] : M[bOffset]` } @@ -144,15 +144,15 @@ Click on an instruction name to jump to its section. 0x10 [`CALLDATACOPY`](#isa-section-calldatacopy) Copy calldata into memory. 
- 88 + 120 { - `M[dstOffset:dstOffset+size] = calldata[cdOffset:cdOffset+size]` + `M[dstOffset:dstOffset+copySize] = calldata[cdOffset:cdOffset+copySize]` } 0x11 [`SLOAD`](#isa-section-sload) Load a word from storage. - 64 + 88 { `M[dstOffset] = storage[M[slotOffset]]` } @@ -160,7 +160,7 @@ Click on an instruction name to jump to its section. 0x12 [`SSTORE`](#isa-section-sstore) Write a word to storage. - 64 + 88 { `storage[M[slotOffset]] = M[srcOffset]` } @@ -168,25 +168,25 @@ Click on an instruction name to jump to its section. 0x13 [`EMITNOTEHASH`](#isa-section-emitnotehash) Emit a new note hash to be inserted into the notes tree - 40 + 56 emitNoteHash(M[contentOffset]) 0x14 [`EMITNULLIFIER`](#isa-section-emitnullifier) Emit a new nullifier to be inserted into the nullifier tree - 40 + 56 emitNullifier(M[nullifierOffset]) 0x15 [`SENDL2TOL1MSG`](#isa-section-sendl2tol1msg) Send an L2-to-L1 message - 40 + 56 sendL2ToL1Message(M[contentOffset]) 0x16 [`JUMP`](#isa-section-jump) Jump to a location in the bytecode. - 32 + 48 { `PC = loc` } @@ -194,7 +194,7 @@ Click on an instruction name to jump to its section. 0x17 [`JUMPI`](#isa-section-jumpi) Conditionally jump to a location in the bytecode. - 64 + 88 { `PC = M[condOffset] > 0 ? loc : PC` } @@ -202,26 +202,26 @@ Click on an instruction name to jump to its section. 0x18 [`RETURN`](#isa-section-return) Halt execution with `success`, optionally returning some data. - 64 + 88 { - `return(M[offset:offset+size])` + `return(M[retOffset:retOffset+retSize])` } 0x19 [`REVERT`](#isa-section-revert) Halt execution with `failure`, reverting state changes and optionally returning some data. - 64 + 88 { - `revert(M[offset:offset+size])` + `revert(M[retOffset:retOffset+retSize])` } 0x1a [`CALL`](#isa-section-call) Call into another contract. - 208 + 248 {`M[successOffset] = call( - M[l1GasOffset], M[l2GasOffset], M[addrOffset], + M[gasOffset], M[gasOffset+1], M[addrOffset], M[argsOffset], M[argsSize], M[retOffset], M[retSize])`} @@ -229,10 +229,10 @@ Click on an instruction name to jump to its section. 0x1b [`STATICCALL`](#isa-section-staticcall) Call into another contract, disallowing persistent state modifications. - 208 + 248 {`M[successOffset] = staticcall( - M[l1GasOffset], M[l2GasOffset], M[addrOffset], + M[gasOffset], M[gasOffset+1], M[addrOffset], M[argsOffset], M[argsSize], M[retOffset], M[retSize])`} @@ -240,15 +240,15 @@ Click on an instruction name to jump to its section. 0x1c [`ULOG`](#isa-section-ulog) Emit an unencrypted log with data from the `field` memory page - 64 + 88 { - `ulog(M[offset:offset+size])` + `ulog(M[logOffset:logOffset+logSize])` } 0x1d [`CHAINID`](#isa-section-chainid) Get this rollup's L1 chain ID - 40 + 56 { `M[dstOffset] = Globals.chainId` } @@ -256,7 +256,7 @@ Click on an instruction name to jump to its section. 0x1e [`VERSION`](#isa-section-version) Get this rollup's L2 version ID - 40 + 56 { `M[dstOffset] = Globals.version` } @@ -264,7 +264,7 @@ Click on an instruction name to jump to its section. 0x1f [`BLOCKNUMBER`](#isa-section-blocknumber) Get this block's number - 40 + 56 { `M[dstOffset] = Globals.blocknumber` } @@ -272,7 +272,7 @@ Click on an instruction name to jump to its section. 0x20 [`TIMESTAMP`](#isa-section-timestamp) Get this L2 block's timestamp - 40 + 56 { `M[dstOffset] = Globals.timestamp` } @@ -280,7 +280,7 @@ Click on an instruction name to jump to its section. 
0x21 [`COINBASE`](#isa-section-coinbase) Get the block's beneficiary address - 40 + 56 { `M[dstOffset] = Globals.coinbase` } @@ -288,7 +288,7 @@ Click on an instruction name to jump to its section. 0x22 [`BLOCKL1GASLIMIT`](#isa-section-blockl1gaslimit) Total amount of "L1 gas" that a block can consume - 40 + 56 { `M[dstOffset] = Globals.l1GasLimit` } @@ -296,7 +296,7 @@ Click on an instruction name to jump to its section. 0x23 [`BLOCKL2GASLIMIT`](#isa-section-blockl2gaslimit) Total amount of "L2 gas" that a block can consume - 40 + 56 { `M[dstOffset] = Globals.l2GasLimit` } @@ -304,7 +304,7 @@ Click on an instruction name to jump to its section. 0x24 [`NOTESROOT`](#isa-section-notesroot) Get the historical note-hash tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].note_hash_tree_root` } @@ -312,7 +312,7 @@ Click on an instruction name to jump to its section. 0x25 [`NULLIFIERSROOT`](#isa-section-nullroot) Get the historical nullifier tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].nullifier_tree_root` } @@ -320,7 +320,7 @@ Click on an instruction name to jump to its section. 0x26 [`CONTRACTSROOT`](#isa-section-contractsroot) Get the historical contracts tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].contracts_tree_root` } @@ -328,7 +328,7 @@ Click on an instruction name to jump to its section. 0x27 [`MSGSROOT`](#isa-section-msgsroot) Get the historical l1-to-l2 messages tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].l1_to_l2_messages_tree_root` } @@ -336,7 +336,7 @@ Click on an instruction name to jump to its section. 0x28 [`NOTESROOT`](#isa-section-notesroot) Get the historical note-hash tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].note_hash_tree_root` } @@ -344,7 +344,7 @@ Click on an instruction name to jump to its section. 0x29 [`PUBLICDATAROOT`](#isa-section-publicdataroot) Get the historical public data tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].public_data_tree_root` } @@ -352,7 +352,7 @@ Click on an instruction name to jump to its section. 0x2a [`GLOBALSHASH`](#isa-section-globalshash) Get the historical global variables hash as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].global_variables_hash` } @@ -360,15 +360,15 @@ Click on an instruction name to jump to its section. 0x2b [`BLOCKSROOT`](#isa-section-blocksroot) Get the historical blocks tree root as of the specified block number. - 64 + 88 { - `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].blocks_tree_root` + `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].archive_root` } 0x2c [`GRANDROOT`](#isa-section-grandroot) Get the historical grandfather tree root as of the specified block number. - 64 + 88 { `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].grandfather_tree_root` } @@ -376,7 +376,7 @@ Click on an instruction name to jump to its section. 0x2d [`ORIGIN`](#isa-section-origin) Get the transaction's origination address - 40 + 56 { `M[dstOffset] = TxContext.origin` } @@ -384,7 +384,7 @@ Click on an instruction name to jump to its section. 
0x2e [`REFUNDEE`](#isa-section-refundee) The recipient of fee refunds for this transaction - 40 + 56 { `M[dstOffset] = TxContext.refundee` } @@ -392,7 +392,7 @@ Click on an instruction name to jump to its section. 0x2f [`FEEPERL1GAS`](#isa-section-feeperl1gas) The fee to be paid per "L1 gas" - set by the transaction's original caller - 40 + 56 { `M[dstOffset] = TxContext.feePerL1Gas` } @@ -400,7 +400,7 @@ Click on an instruction name to jump to its section. 0x30 [`FEEPERL2GAS`](#isa-section-feeperl2gas) The fee to be paid per "L2 gas" - set by the transaction's original caller - 40 + 56 { `M[dstOffset] = TxContext.feePerL2Gas` } @@ -408,7 +408,7 @@ Click on an instruction name to jump to its section. 0x31 [`CALLER`](#isa-section-caller) Get the address of the sender (the caller's context) - 40 + 56 { `M[dstOffset] = CallContext.sender` } @@ -416,7 +416,7 @@ Click on an instruction name to jump to its section. 0x32 [`ADDRESS`](#isa-section-address) Get the address of the currently executing l2 contract - 40 + 56 { `M[dstOffset] = CallContext.storageContractAddress` } @@ -424,7 +424,7 @@ Click on an instruction name to jump to its section. 0x33 [`PORTAL`](#isa-section-portal) Get the address of the l1 portal contract - 40 + 56 { `M[dstOffset] = CallContext.portalAddress` } @@ -432,7 +432,7 @@ Click on an instruction name to jump to its section. 0x34 [`CALLDEPTH`](#isa-section-calldepth) Get how many calls deep the current call context is - 40 + 56 { `M[dstOffset] = CallContext.calldepth` } @@ -440,7 +440,7 @@ Click on an instruction name to jump to its section. 0x35 [`L1GAS`](#isa-section-l1gas) Remaining "L1 gas" for this call (after this instruction). - 40 + 56 { `M[dstOffset] = LatestContext.l1Gas` } @@ -448,7 +448,7 @@ Click on an instruction name to jump to its section. 0x36 [`L2GAS`](#isa-section-l2gas) Remaining "L2 gas" for this call (after this instruction). - 40 + 56 { `M[dstOffset] = LatestContext.l2Gas` } @@ -466,17 +466,17 @@ Addition (a + b) - **Category**: arithmetic - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] + M[bOffset] mod 2^k` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/ADD.png) +[![](./images/bit-formats/ADD.png)](./images/bit-formats/ADD.png) ### `SUB` (0x01) Subtraction (a - b) @@ -486,17 +486,17 @@ Subtraction (a - b) - **Category**: arithmetic - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. 
+ - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] - M[bOffset] mod 2^k` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/SUB.png) +[![](./images/bit-formats/SUB.png)](./images/bit-formats/SUB.png) ### `DIV` (0x02) Unsigned division (a / b) @@ -506,17 +506,17 @@ Unsigned division (a / b) - **Category**: arithmetic - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] / M[bOffset]` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/DIV.png) +[![](./images/bit-formats/DIV.png)](./images/bit-formats/DIV.png) ### `EQ` (0x03) Equality check (a == b) @@ -526,17 +526,17 @@ Equality check (a == b) - **Category**: conditional - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] == M[bOffset] ? 1 : 0` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/EQ.png) +[![](./images/bit-formats/EQ.png)](./images/bit-formats/EQ.png) ### `LT` (0x04) Less-than check (a < b) @@ -546,17 +546,17 @@ Less-than check (a < b) - **Category**: conditional - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
- - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] < M[bOffset] ? 1 : 0` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/LT.png) +[![](./images/bit-formats/LT.png)](./images/bit-formats/LT.png) ### `LTE` (0x05) Less-than-or-equals check (a <= b) @@ -566,17 +566,17 @@ Less-than-or-equals check (a <= b) - **Category**: conditional - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] <= M[bOffset] ? 1 : 0` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/LTE.png) +[![](./images/bit-formats/LTE.png)](./images/bit-formats/LTE.png) ### `AND` (0x06) Bitwise AND (a & b) @@ -586,17 +586,17 @@ Bitwise AND (a & b) - **Category**: bitwise - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] AND M[bOffset]` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/AND.png) +[![](./images/bit-formats/AND.png)](./images/bit-formats/AND.png) ### `OR` (0x07) Bitwise OR (a | b) @@ -606,17 +606,17 @@ Bitwise OR (a | b) - **Category**: bitwise - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. 
Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] OR M[bOffset]` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/OR.png) +[![](./images/bit-formats/OR.png)](./images/bit-formats/OR.png) ### `XOR` (0x08) Bitwise XOR (a ^ b) @@ -626,17 +626,17 @@ Bitwise XOR (a ^ b) - **Category**: bitwise - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] XOR M[bOffset]` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/XOR.png) +[![](./images/bit-formats/XOR.png)](./images/bit-formats/XOR.png) ### `NOT` (0x09) Bitwise NOT (inversion) @@ -646,16 +646,16 @@ Bitwise NOT (inversion) - **Category**: bitwise - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = NOT M[aOffset]` -- **Tag checks**: `T[aOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 72 +- **Tag checks**: `T[aOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 96 -![](./images/bit-formats/NOT.png) +[![](./images/bit-formats/NOT.png)](./images/bit-formats/NOT.png) ### `SHL` (0x0a) Bitwise leftward shift (a << b) @@ -665,17 +665,17 @@ Bitwise leftward shift (a << b) - **Category**: bitwise - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
- - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] << M[bOffset]` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/SHL.png) +[![](./images/bit-formats/SHL.png)](./images/bit-formats/SHL.png) ### `SHR` (0x0b) Bitwise rightward shift (a >> b) @@ -685,17 +685,17 @@ Bitwise rightward shift (a >> b) - **Category**: bitwise - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. + - **in-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. - **Args**: - **aOffset**: memory offset of the operation's left input - **bOffset**: memory offset of the operation's right input - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = M[aOffset] >> M[bOffset]` -- **Tag checks**: `T[aOffset] == T[bOffset] == op-type` -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 96 +- **Tag checks**: `T[aOffset] == T[bOffset] == in-tag` +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 128 -![](./images/bit-formats/SHR.png) +[![](./images/bit-formats/SHR.png)](./images/bit-formats/SHR.png) ### `CAST` (0x0c) Type cast @@ -705,16 +705,16 @@ Type cast - **Category**: types - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **dest-type**: The [type/size](./Types) to tag the output with when different from `op-type`. + - **dst-tag**: The [tag/size](./state-model#tags-and-tagged-memory) to tag the destination with but not to check inputs against. - **Args**: - **aOffset**: memory offset of word to cast - **dstOffset**: memory offset specifying where to store operation's result -- **Expression**: `M[dstOffset] = cast(M[aOffset])` -- **Details**: Cast a word in memory based on the `dest-type` specified in the bytecode. Truncates when casting to a smaller type, left-zero-pads when casting to a larger type. -- **Tag updates**: `T[dstOffset] = dest-type` -- **Bit-size**: 72 +- **Expression**: `M[dstOffset] = cast(M[aOffset])` +- **Details**: Cast a word in memory based on the `dst-tag` specified in the bytecode. Truncates (`M[dstOffset] = M[aOffset] mod 2^dstsize`) when casting to a smaller type, left-zero-pads when casting to a larger type. See [here](./state-model#cast-and-tag-conversions) for more details. 
+- **Tag updates**: `T[dstOffset] = dst-tag` +- **Bit-size**: 96 -![](./images/bit-formats/CAST.png) +[![](./images/bit-formats/CAST.png)](./images/bit-formats/CAST.png) ### `SET` (0x0d) Set a memory word from a constant in the bytecode. @@ -724,16 +724,16 @@ Set a memory word from a constant in the bytecode. - **Category**: memory - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - - **op-type**: The [type/size](./Types) to check inputs against and tag the output with. `field` type is NOT supported for SET. + - **in-tag**: The [type/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for SET. - **Args**: - **const**: an N-bit constant value from the bytecode to store in memory (any type except `field`) - **dstOffset**: memory offset specifying where to store the constant - **Expression**: `M[dstOffset] = const` -- **Details**: Set memory word at `dstOffset` to `const`'s immediate value. `const`'s bit-size (N) can be 8, 16, 32, 64, or 128 based on `op-type`. It _cannot be 254 (`field` type)_! -- **Tag updates**: `T[dstOffset] = op-type` -- **Bit-size**: 48+N +- **Details**: Set memory word at `dstOffset` to `const`'s immediate value. `const`'s bit-size (N) can be 8, 16, 32, 64, or 128 based on `in-tag`. It _cannot be 254 (`field` type)_! +- **Tag updates**: `T[dstOffset] = in-tag` +- **Bit-size**: 64+N -![](./images/bit-formats/SET.png) +[![](./images/bit-formats/SET.png)](./images/bit-formats/SET.png) ### `MOV` (0x0e) Move a word from source memory location to destination`. @@ -748,9 +748,9 @@ Move a word from source memory location to destination`. - **dstOffset**: memory offset specifying where to store that word - **Expression**: `M[dstOffset] = M[srcOffset]` - **Tag updates**: `T[dstOffset] = T[srcOffset]` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/MOV.png) +[![](./images/bit-formats/MOV.png)](./images/bit-formats/MOV.png) ### `CMOV` (0x0f) Move a word (conditionally chosen) from one memory location to another (`d = cond > 0 ? a : b`). @@ -768,9 +768,9 @@ Move a word (conditionally chosen) from one memory location to another (`d = con - **Expression**: `M[dstOffset] = M[condOffset] > 0 ? M[aOffset] : M[bOffset]` - **Details**: One of two source memory locations is chosen based on the condition. `T[condOffset]` is not checked because the greater-than-zero suboperation is the same regardless of type. - **Tag updates**: `T[dstOffset] = M[condOffset] > 0 ? T[aOffset] : T[bOffset]` -- **Bit-size**: 112 +- **Bit-size**: 152 -![](./images/bit-formats/CMOV.png) +[![](./images/bit-formats/CMOV.png)](./images/bit-formats/CMOV.png) ### `CALLDATACOPY` (0x10) Copy calldata into memory. @@ -782,14 +782,14 @@ Copy calldata into memory. - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
- **Args**: - **cdOffset**: offset into calldata to copy from - - **size**: number of words to copy + - **copySize**: number of words to copy - **dstOffset**: memory offset specifying where to copy the first word to -- **Expression**: `M[dstOffset:dstOffset+size] = calldata[cdOffset:cdOffset+size]` +- **Expression**: `M[dstOffset:dstOffset+copySize] = calldata[cdOffset:cdOffset+copySize]` - **Details**: Calldata is read-only and cannot be directly operated on by other instructions. This instruction moves words from calldata into memory so they can be operated on normally. -- **Tag updates**: `T[dstOffset:dstOffset+size] = field` -- **Bit-size**: 88 +- **Tag updates**: `T[dstOffset:dstOffset+copySize] = field` +- **Bit-size**: 120 -![](./images/bit-formats/CALLDATACOPY.png) +[![](./images/bit-formats/CALLDATACOPY.png)](./images/bit-formats/CALLDATACOPY.png) ### `SLOAD` (0x11) Load a word from storage. @@ -805,9 +805,9 @@ Load a word from storage. - **Expression**: `M[dstOffset] = storage[M[slotOffset]]` - **Details**: Load a word from this contract's persistent public storage into memory. - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/SLOAD.png) +[![](./images/bit-formats/SLOAD.png)](./images/bit-formats/SLOAD.png) ### `SSTORE` (0x12) Write a word to storage. @@ -822,9 +822,9 @@ Write a word to storage. - **slotOffset**: memory offset containing the storage slot to store to - **Expression**: `storage[M[slotOffset]] = M[srcOffset]` - **Details**: Store a word from memory into this contract's persistent public storage. -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/SSTORE.png) +[![](./images/bit-formats/SSTORE.png)](./images/bit-formats/SSTORE.png) ### `EMITNOTEHASH` (0x13) Emit a new note hash to be inserted into the notes tree @@ -837,9 +837,9 @@ Emit a new note hash to be inserted into the notes tree - **Args**: - **noteHashOffset**: memory offset of the note hash - **Expression**: emitNoteHash(M[contentOffset]) -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/EMITNOTEHASH.png) +[![](./images/bit-formats/EMITNOTEHASH.png)](./images/bit-formats/EMITNOTEHASH.png) ### `EMITNULLIFIER` (0x14) Emit a new nullifier to be inserted into the nullifier tree @@ -852,9 +852,9 @@ Emit a new nullifier to be inserted into the nullifier tree - **Args**: - **nullifierOffset**: memory offset of nullifier - **Expression**: emitNullifier(M[nullifierOffset]) -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/EMITNULLIFIER.png) +[![](./images/bit-formats/EMITNULLIFIER.png)](./images/bit-formats/EMITNULLIFIER.png) ### `SENDL2TOL1MSG` (0x15) Send an L2-to-L1 message @@ -867,9 +867,9 @@ Send an L2-to-L1 message - **Args**: - **contentOffset**: memory offset of the message content - **Expression**: sendL2ToL1Message(M[contentOffset]) -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/SENDL2TOL1MSG.png) +[![](./images/bit-formats/SENDL2TOL1MSG.png)](./images/bit-formats/SENDL2TOL1MSG.png) ### `JUMP` (0x16) Jump to a location in the bytecode. @@ -881,9 +881,9 @@ Jump to a location in the bytecode. - **loc**: target location to jump to - **Expression**: `PC = loc` - **Details**: Target location is an immediate value (a constant in the bytecode). -- **Bit-size**: 32 +- **Bit-size**: 48 -![](./images/bit-formats/JUMP.png) +[![](./images/bit-formats/JUMP.png)](./images/bit-formats/JUMP.png) ### `JUMPI` (0x17) Conditionally jump to a location in the bytecode. 
@@ -898,9 +898,9 @@ Conditionally jump to a location in the bytecode. - **condOffset**: memory offset of the operations 'conditional' input - **Expression**: `PC = M[condOffset] > 0 ? loc : PC` - **Details**: Target location is an immediate value (a constant in the bytecode). `T[condOffset]` is not checked because the greater-than-zero suboperation is the same regardless of type. -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/JUMPI.png) +[![](./images/bit-formats/JUMPI.png)](./images/bit-formats/JUMPI.png) ### `RETURN` (0x18) Halt execution with `success`, optionally returning some data. @@ -911,13 +911,13 @@ Halt execution with `success`, optionally returning some data. - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - **Args**: - - **offset**: memory offset of first word to return - - **size**: number of words to return -- **Expression**: `return(M[offset:offset+size])` + - **retOffset**: memory offset of first word to return + - **retSize**: number of words to return +- **Expression**: `return(M[retOffset:retOffset+retSize])` - **Details**: Return control flow to the calling context/contract. -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/RETURN.png) +[![](./images/bit-formats/RETURN.png)](./images/bit-formats/RETURN.png) ### `REVERT` (0x19) Halt execution with `failure`, reverting state changes and optionally returning some data. @@ -928,13 +928,13 @@ Halt execution with `failure`, reverting state changes and optionally returning - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - **Args**: - - **offset**: memory offset of first word to return - - **size**: number of words to return -- **Expression**: `revert(M[offset:offset+size])` + - **retOffset**: memory offset of first word to return + - **retSize**: number of words to return +- **Expression**: `revert(M[retOffset:retOffset+retSize])` - **Details**: Return control flow to the calling context/contract. -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/REVERT.png) +[![](./images/bit-formats/REVERT.png)](./images/bit-formats/REVERT.png) ### `CALL` (0x1a) Call into another contract. @@ -945,8 +945,7 @@ Call into another contract. - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - **Args**: - - **l1GasOffset**: amount of L1 gas to provide to the callee - - **l2GasOffset**: amount of L2 gas to provide to the callee + - **gasOffset**: offset to two words containing `{l1Gas, l2Gas}`: amount of L1 and L2 gas to provide to the callee - **addrOffset**: address of the contract to call - **argsOffset**: memory offset to args (will become the callee's calldata) - **argsSize**: number of words to pass via callee's calldata @@ -956,22 +955,22 @@ Call into another contract. 
- **Expression**: {`M[successOffset] = call( - M[l1GasOffset], M[l2GasOffset], M[addrOffset], + M[gasOffset], M[gasOffset+1], M[addrOffset], M[argsOffset], M[argsSize], M[retOffset], M[retSize])`} - **Details**: Creates a new CallContext, triggers execution of the corresponding contract code, and then resumes execution in the current CallContext. A non-existent contract or one with no code will return success. Nested call has an incremented `CallContext.calldepth`. -- **Tag checks**: `T[l1GasOffset] == T[l2GasOffset] == u32` +- **Tag checks**: `T[gasOffset] == T[gasOffset+1] == u32` - **Tag updates**: {`T[successOffset] = u8 T[retOffset:retOffset+retSize] = field`} -- **Bit-size**: 208 +- **Bit-size**: 248 -![](./images/bit-formats/CALL.png) +[![](./images/bit-formats/CALL.png)](./images/bit-formats/CALL.png) ### `STATICCALL` (0x1b) Call into another contract, disallowing persistent state modifications. @@ -982,8 +981,7 @@ Call into another contract, disallowing persistent state modifications. - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. - **Args**: - - **l1GasOffset**: amount of L1 gas to provide to the callee - - **l2GasOffset**: amount of L2 gas to provide to the callee + - **gasOffset**: offset to two words containing `{l1Gas, l2Gas}`: amount of L1 and L2 gas to provide to the callee - **addrOffset**: address of the contract to call - **argsOffset**: memory offset to args (will become the callee's calldata) - **argsSize**: number of words to pass via callee's calldata @@ -993,20 +991,20 @@ Call into another contract, disallowing persistent state modifications. - **Expression**: {`M[successOffset] = staticcall( - M[l1GasOffset], M[l2GasOffset], M[addrOffset], + M[gasOffset], M[gasOffset+1], M[addrOffset], M[argsOffset], M[argsSize], M[retOffset], M[retSize])`} - **Details**: Same as `CALL`, but the callee is cannot modify persistent state. Disallowed instructions are `SSTORE`, `ULOG`, `CALL`. -- **Tag checks**: `T[l1GasOffset] == T[l2GasOffset] == u32` +- **Tag checks**: `T[gasOffset] == T[gasOffset+1] == u32` - **Tag updates**: {`T[successOffset] = u8 T[retOffset:retOffset+retSize] = field`} -- **Bit-size**: 208 +- **Bit-size**: 248 -![](./images/bit-formats/STATICCALL.png) +[![](./images/bit-formats/STATICCALL.png)](./images/bit-formats/STATICCALL.png) ### `ULOG` (0x1c) Emit an unencrypted log with data from the `field` memory page @@ -1017,12 +1015,12 @@ Emit an unencrypted log with data from the `field` memory page - **Flags**: - **indirect**: Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`. 
- **Args**: - - **offset**: memory offset of the data to log - - **size**: number of words to log -- **Expression**: `ulog(M[offset:offset+size])` -- **Bit-size**: 64 + - **logOffset**: memory offset of the data to log + - **logSize**: number of words to log +- **Expression**: `ulog(M[logOffset:logOffset+logSize])` +- **Bit-size**: 88 -![](./images/bit-formats/ULOG.png) +[![](./images/bit-formats/ULOG.png)](./images/bit-formats/ULOG.png) ### `CHAINID` (0x1d) Get this rollup's L1 chain ID @@ -1036,9 +1034,9 @@ Get this rollup's L1 chain ID - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.chainId` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/CHAINID.png) +[![](./images/bit-formats/CHAINID.png)](./images/bit-formats/CHAINID.png) ### `VERSION` (0x1e) Get this rollup's L2 version ID @@ -1052,9 +1050,9 @@ Get this rollup's L2 version ID - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.version` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/VERSION.png) +[![](./images/bit-formats/VERSION.png)](./images/bit-formats/VERSION.png) ### `BLOCKNUMBER` (0x1f) Get this block's number @@ -1068,9 +1066,9 @@ Get this block's number - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.blocknumber` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/BLOCKNUMBER.png) +[![](./images/bit-formats/BLOCKNUMBER.png)](./images/bit-formats/BLOCKNUMBER.png) ### `TIMESTAMP` (0x20) Get this L2 block's timestamp @@ -1084,9 +1082,9 @@ Get this L2 block's timestamp - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.timestamp` - **Tag updates**: `T[dstOffset] = u64` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/TIMESTAMP.png) +[![](./images/bit-formats/TIMESTAMP.png)](./images/bit-formats/TIMESTAMP.png) ### `COINBASE` (0x21) Get the block's beneficiary address @@ -1100,9 +1098,9 @@ Get the block's beneficiary address - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.coinbase` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/COINBASE.png) +[![](./images/bit-formats/COINBASE.png)](./images/bit-formats/COINBASE.png) ### `BLOCKL1GASLIMIT` (0x22) Total amount of "L1 gas" that a block can consume @@ -1116,9 +1114,9 @@ Total amount of "L1 gas" that a block can consume - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.l1GasLimit` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/BLOCKL1GASLIMIT.png) +[![](./images/bit-formats/BLOCKL1GASLIMIT.png)](./images/bit-formats/BLOCKL1GASLIMIT.png) ### `BLOCKL2GASLIMIT` (0x23) Total amount of "L2 gas" that a block can consume @@ -1132,9 +1130,9 @@ Total amount of "L2 gas" that a block can consume - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = Globals.l2GasLimit` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/BLOCKL2GASLIMIT.png) 
+[![](./images/bit-formats/BLOCKL2GASLIMIT.png)](./images/bit-formats/BLOCKL2GASLIMIT.png) ### `NOTESROOT` (0x24) Get the historical note-hash tree root as of the specified block number. @@ -1149,9 +1147,9 @@ Get the historical note-hash tree root as of the specified block number. - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].note_hash_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/NOTESROOT.png) +[![](./images/bit-formats/NOTESROOT.png)](./images/bit-formats/NOTESROOT.png) ### `NULLIFIERSROOT` (0x25) Get the historical nullifier tree root as of the specified block number. @@ -1166,9 +1164,9 @@ Get the historical nullifier tree root as of the specified block number. - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].nullifier_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/NULLIFIERSROOT.png) +[![](./images/bit-formats/NULLIFIERSROOT.png)](./images/bit-formats/NULLIFIERSROOT.png) ### `CONTRACTSROOT` (0x26) Get the historical contracts tree root as of the specified block number. @@ -1183,9 +1181,9 @@ Get the historical contracts tree root as of the specified block number. - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].contracts_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/CONTRACTSROOT.png) +[![](./images/bit-formats/CONTRACTSROOT.png)](./images/bit-formats/CONTRACTSROOT.png) ### `MSGSROOT` (0x27) Get the historical l1-to-l2 messages tree root as of the specified block number. @@ -1200,9 +1198,9 @@ Get the historical l1-to-l2 messages tree root as of the specified block number. - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].l1_to_l2_messages_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/MSGSROOT.png) +[![](./images/bit-formats/MSGSROOT.png)](./images/bit-formats/MSGSROOT.png) ### `NOTESROOT` (0x28) Get the historical note-hash tree root as of the specified block number. @@ -1217,9 +1215,9 @@ Get the historical note-hash tree root as of the specified block number. - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].note_hash_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/NOTESROOT.png) +[![](./images/bit-formats/NOTESROOT.png)](./images/bit-formats/NOTESROOT.png) ### `PUBLICDATAROOT` (0x29) Get the historical public data tree root as of the specified block number. @@ -1234,9 +1232,9 @@ Get the historical public data tree root as of the specified block number. 
- **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].public_data_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/PUBLICDATAROOT.png) +[![](./images/bit-formats/PUBLICDATAROOT.png)](./images/bit-formats/PUBLICDATAROOT.png) ### `GLOBALSHASH` (0x2a) Get the historical global variables hash as of the specified block number. @@ -1251,9 +1249,9 @@ Get the historical global variables hash as of the specified block number. - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].global_variables_hash` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/GLOBALSHASH.png) +[![](./images/bit-formats/GLOBALSHASH.png)](./images/bit-formats/GLOBALSHASH.png) ### `BLOCKSROOT` (0x2b) Get the historical blocks tree root as of the specified block number. @@ -1266,11 +1264,11 @@ Get the historical blocks tree root as of the specified block number. - **Args**: - **blockNumOffset**: memory offset of the block number input - **dstOffset**: memory offset specifying where to store operation's result -- **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].blocks_tree_root` +- **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].archive_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/BLOCKSROOT.png) +[![](./images/bit-formats/BLOCKSROOT.png)](./images/bit-formats/BLOCKSROOT.png) ### `GRANDROOT` (0x2c) Get the historical grandfather tree root as of the specified block number. @@ -1285,9 +1283,9 @@ Get the historical grandfather tree root as of the specified block number. 
- **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].grandfather_tree_root` - **Tag updates**: `T[dstOffset] = field` -- **Bit-size**: 64 +- **Bit-size**: 88 -![](./images/bit-formats/GRANDROOT.png) +[![](./images/bit-formats/GRANDROOT.png)](./images/bit-formats/GRANDROOT.png) ### `ORIGIN` (0x2d) Get the transaction's origination address @@ -1301,9 +1299,9 @@ Get the transaction's origination address - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = TxContext.origin` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/ORIGIN.png) +[![](./images/bit-formats/ORIGIN.png)](./images/bit-formats/ORIGIN.png) ### `REFUNDEE` (0x2e) The recipient of fee refunds for this transaction @@ -1317,9 +1315,9 @@ The recipient of fee refunds for this transaction - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = TxContext.refundee` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/REFUNDEE.png) +[![](./images/bit-formats/REFUNDEE.png)](./images/bit-formats/REFUNDEE.png) ### `FEEPERL1GAS` (0x2f) The fee to be paid per "L1 gas" - set by the transaction's original caller @@ -1333,9 +1331,9 @@ The fee to be paid per "L1 gas" - set by the transaction's original caller - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = TxContext.feePerL1Gas` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/FEEPERL1GAS.png) +[![](./images/bit-formats/FEEPERL1GAS.png)](./images/bit-formats/FEEPERL1GAS.png) ### `FEEPERL2GAS` (0x30) The fee to be paid per "L2 gas" - set by the transaction's original caller @@ -1349,9 +1347,9 @@ The fee to be paid per "L2 gas" - set by the transaction's original caller - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = TxContext.feePerL2Gas` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/FEEPERL2GAS.png) +[![](./images/bit-formats/FEEPERL2GAS.png)](./images/bit-formats/FEEPERL2GAS.png) ### `CALLER` (0x31) Get the address of the sender (the caller's context) @@ -1365,9 +1363,9 @@ Get the address of the sender (the caller's context) - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = CallContext.sender` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/CALLER.png) +[![](./images/bit-formats/CALLER.png)](./images/bit-formats/CALLER.png) ### `ADDRESS` (0x32) Get the address of the currently executing l2 contract @@ -1381,9 +1379,9 @@ Get the address of the currently executing l2 contract - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = CallContext.storageContractAddress` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/ADDRESS.png) +[![](./images/bit-formats/ADDRESS.png)](./images/bit-formats/ADDRESS.png) ### `PORTAL` (0x33) Get the address of the l1 portal contract @@ -1397,9 +1395,9 @@ Get the address of the l1 portal contract - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: 
`M[dstOffset] = CallContext.portalAddress` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/PORTAL.png) +[![](./images/bit-formats/PORTAL.png)](./images/bit-formats/PORTAL.png) ### `CALLDEPTH` (0x34) Get how many calls deep the current call context is @@ -1414,9 +1412,9 @@ Get how many calls deep the current call context is - **Expression**: `M[dstOffset] = CallContext.calldepth` - **Details**: Note: security issues with EVM's tx.origin can be resolved by asserting the `calldepth == 0`. - **Tag updates**: `T[dstOffset] = u8` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/CALLDEPTH.png) +[![](./images/bit-formats/CALLDEPTH.png)](./images/bit-formats/CALLDEPTH.png) ### `L1GAS` (0x35) Remaining "L1 gas" for this call (after this instruction). @@ -1430,9 +1428,9 @@ Remaining "L1 gas" for this call (after this instruction). - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = LatestContext.l1Gas` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/L1GAS.png) +[![](./images/bit-formats/L1GAS.png)](./images/bit-formats/L1GAS.png) ### `L2GAS` (0x36) Remaining "L2 gas" for this call (after this instruction). @@ -1446,6 +1444,6 @@ Remaining "L2 gas" for this call (after this instruction). - **dstOffset**: memory offset specifying where to store operation's result - **Expression**: `M[dstOffset] = LatestContext.l2Gas` - **Tag updates**: `T[dstOffset] = u32` -- **Bit-size**: 40 +- **Bit-size**: 56 -![](./images/bit-formats/L2GAS.png) +[![](./images/bit-formats/L2GAS.png)](./images/bit-formats/L2GAS.png) diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/ADD.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/ADD.png index 4dd6dcc56b3..ee1a32d95b7 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/ADD.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/ADD.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/ADDRESS.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/ADDRESS.png index 2c128e55cc3..4314acef2a1 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/ADDRESS.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/ADDRESS.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/AND.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/AND.png index 28699de3959..9bfc38479f1 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/AND.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/AND.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL1GASLIMIT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL1GASLIMIT.png index f9cbd7a4b40..20f5ded9390 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL1GASLIMIT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL1GASLIMIT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL2GASLIMIT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL2GASLIMIT.png index 9e1160f7e36..f952f689f04 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL2GASLIMIT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKL2GASLIMIT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKNUMBER.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKNUMBER.png 
index 55de72ff390..c7217f8452a 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKNUMBER.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKNUMBER.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKSROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKSROOT.png index cb678fd654b..d844d63bb4f 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKSROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/BLOCKSROOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALL.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALL.png index c4a26fb27ef..a0170351666 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALL.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALL.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDATACOPY.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDATACOPY.png index 305917efe37..133d86a37d9 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDATACOPY.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDATACOPY.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDEPTH.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDEPTH.png index 382668bfe4a..1cca7c9e725 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDEPTH.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLDEPTH.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLER.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLER.png index a594244415a..dfb24671c96 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLER.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CALLER.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CAST.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CAST.png index 529a236c900..d663c342ae7 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CAST.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CAST.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CHAINID.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CHAINID.png index 2c468671c3f..d197552c9dc 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CHAINID.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CHAINID.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CMOV.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CMOV.png index 7c1ac378dd3..a46692edd84 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CMOV.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CMOV.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/COINBASE.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/COINBASE.png index f2e8eb1ff8e..72e2f88758d 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/COINBASE.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/COINBASE.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/CONTRACTSROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/CONTRACTSROOT.png index ddb3fc79680..47c6cb0f93c 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/CONTRACTSROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/CONTRACTSROOT.png 
differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/DIV.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/DIV.png index 2a30fad2d28..58fee0f6853 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/DIV.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/DIV.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNOTEHASH.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNOTEHASH.png index 3f5f66fc40a..fd31987d317 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNOTEHASH.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNOTEHASH.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNULLIFIER.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNULLIFIER.png index d6e841d1c03..50c4328012c 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNULLIFIER.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/EMITNULLIFIER.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/EQ.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/EQ.png index 18bd16ed228..426d47621b0 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/EQ.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/EQ.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL1GAS.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL1GAS.png index fde3846b150..428bb22433d 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL1GAS.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL1GAS.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL2GAS.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL2GAS.png index d0c1443e816..db94bc190a3 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL2GAS.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/FEEPERL2GAS.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/GLOBALSHASH.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/GLOBALSHASH.png index b1fd91771f0..0cea4cc32d0 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/GLOBALSHASH.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/GLOBALSHASH.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/GRANDROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/GRANDROOT.png index 13638898ac8..34d66c5a331 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/GRANDROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/GRANDROOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMP.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMP.png index d2048dc928c..9d9fd47b38b 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMP.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMP.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMPI.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMPI.png index 1c03b6a42de..326a7971d4a 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMPI.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/JUMPI.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/L1GAS.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/L1GAS.png index 66a53c795e6..7ec775889c5 
100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/L1GAS.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/L1GAS.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/L2GAS.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/L2GAS.png index 1b9668c46bd..fcf79de2f78 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/L2GAS.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/L2GAS.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/LT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/LT.png index f6fcdf112fa..59927272c47 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/LT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/LT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/LTE.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/LTE.png index 1af3baea07c..1b0e35cf290 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/LTE.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/LTE.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/MOV.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/MOV.png index 649465c7d04..4d79281a5e3 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/MOV.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/MOV.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/MSGSROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/MSGSROOT.png index 62aef1a1e30..d39c81f4564 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/MSGSROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/MSGSROOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/NOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/NOT.png index 05ddd512807..34907da432a 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/NOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/NOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/NOTESROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/NOTESROOT.png index 9753651fe35..4a8d6b3d9fd 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/NOTESROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/NOTESROOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/NULLIFIERSROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/NULLIFIERSROOT.png index e8de85ffe1c..fc8d2cd1050 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/NULLIFIERSROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/NULLIFIERSROOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/OR.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/OR.png index ebc64c28a15..d796c0b5488 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/OR.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/OR.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/ORIGIN.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/ORIGIN.png index bcedd1c0a56..f712ff96084 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/ORIGIN.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/ORIGIN.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/PORTAL.png 
b/yellow-paper/docs/public-vm/gen/images/bit-formats/PORTAL.png index 19957ac6c8e..2aa7a82f203 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/PORTAL.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/PORTAL.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/PUBLICDATAROOT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/PUBLICDATAROOT.png index 21a2fe9b538..44ddaf86bc2 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/PUBLICDATAROOT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/PUBLICDATAROOT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/REFUNDEE.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/REFUNDEE.png index f4f89b595f6..70ccb4c1f5a 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/REFUNDEE.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/REFUNDEE.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/RETURN.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/RETURN.png index c81b8566e8f..e1bf7d6a19d 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/RETURN.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/RETURN.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/REVERT.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/REVERT.png index f8de1fe51b4..67c12bd2d06 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/REVERT.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/REVERT.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SENDL2TOL1MSG.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SENDL2TOL1MSG.png index 0e0e529f3ab..04611cd6a43 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SENDL2TOL1MSG.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/SENDL2TOL1MSG.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SET.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SET.png index 90b07c3bcbe..9d68daa851d 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SET.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/SET.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SHL.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SHL.png index 78a1ca4e8e8..0c082bc62a6 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SHL.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/SHL.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SHR.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SHR.png index 0260c3e29e4..ed85ae15c1f 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SHR.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/SHR.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SLOAD.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SLOAD.png index edb4539d8ca..f9c6540667d 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SLOAD.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/SLOAD.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SSTORE.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SSTORE.png index efc6e0272ec..5580d4237a7 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SSTORE.png and 
b/yellow-paper/docs/public-vm/gen/images/bit-formats/SSTORE.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/STATICCALL.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/STATICCALL.png index 4c48dbd0eeb..e433c5063c3 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/STATICCALL.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/STATICCALL.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/SUB.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/SUB.png index 1a58fee072d..e6157763289 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/SUB.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/SUB.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/TIMESTAMP.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/TIMESTAMP.png index 2031f2f6306..8ac215faa81 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/TIMESTAMP.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/TIMESTAMP.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/ULOG.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/ULOG.png index c0f17fdb67b..1c414f29b53 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/ULOG.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/ULOG.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/VERSION.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/VERSION.png index 02b61144468..dc95ff9b5ca 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/VERSION.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/VERSION.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/bit-formats/XOR.png b/yellow-paper/docs/public-vm/gen/images/bit-formats/XOR.png index 7fbb6381452..83610f677e9 100644 Binary files a/yellow-paper/docs/public-vm/gen/images/bit-formats/XOR.png and b/yellow-paper/docs/public-vm/gen/images/bit-formats/XOR.png differ diff --git a/yellow-paper/docs/public-vm/gen/images/alu/alu.png b/yellow-paper/docs/public-vm/images/alu.png similarity index 100% rename from yellow-paper/docs/public-vm/gen/images/alu/alu.png rename to yellow-paper/docs/public-vm/images/alu.png diff --git a/yellow-paper/docs/public-vm/gen/images/control-flow/avm-control-flow.png b/yellow-paper/docs/public-vm/images/avm-control-flow.png similarity index 100% rename from yellow-paper/docs/public-vm/gen/images/control-flow/avm-control-flow.png rename to yellow-paper/docs/public-vm/images/avm-control-flow.png diff --git a/yellow-paper/docs/public-vm/gen/images/state-model/memory.png b/yellow-paper/docs/public-vm/images/memory.png similarity index 100% rename from yellow-paper/docs/public-vm/gen/images/state-model/memory.png rename to yellow-paper/docs/public-vm/images/memory.png diff --git a/yellow-paper/docs/public-vm/security.md b/yellow-paper/docs/public-vm/security.md new file mode 100644 index 00000000000..1ebc179d1fd --- /dev/null +++ b/yellow-paper/docs/public-vm/security.md @@ -0,0 +1,4 @@ +# VM threat model, security requirements + +An honest Prover must always be able to construct a satisfiable proof for an AVM program, even if the program throws an error. +This implies constraints produced by the AVM **must** be satisfiable. 
\ No newline at end of file diff --git a/yellow-paper/docs/public-vm/state-model.md b/yellow-paper/docs/public-vm/state-model.md index 615260cd686..cc92dc820da 100644 --- a/yellow-paper/docs/public-vm/state-model.md +++ b/yellow-paper/docs/public-vm/state-model.md @@ -1,12 +1,12 @@ -# The Aztec VM State Model +# State Model The goal of this note is to describe the VM state model and to specify "internal" VM abstractions that can be mapped to circuit designs. -# A memory-only state model +## A memory-only state model The AVM possesses three distinct data regions, accessed via distinct VM instructions: memory, calldata and returndata -![](./gen/images/state-model/memory.png) +![](./images/memory.png) All data regions are linear blocks of memory where each memory cell stores a finite field element. @@ -45,61 +45,130 @@ Indirect memory addressing is required in order to support read/writes into dyna Memory addresses must be tagged to be a `u32` type. -# Tagged memory +## Types and Tagged Memory -We define a `tag` to refer to the potential maximum value of a cell of main memory. The following tags are supported: +### Terminology/legend +- `M[X]`: main memory cell at offset `X` +- `tag`: a value referring to a memory cell's type (its maximum potential value) +- `T[X]`: the tag associated with memory cell at offset `X` +- `in-tag`: an instruction's tag to check input operands against. Present for many but not all instructions. +- `dst-tag`: the target type of a `CAST` instruction, also used to tag the destination memory cell +- `ADD<X>`: shorthand for an `ADD` instruction with `in-tag = X` +- `ADD<X> aOffset bOffset dstOffset`: a full `ADD` instruction with `in-tag = X`. See [here](./InstructionSet#isa-section-add) for more details. +- `CAST<X>`: a `CAST` instruction with `dst-tag = X`. `CAST` is the only instruction with a `dst-tag`. See [here](./InstructionSet#isa-section-cast) for more details. -| tag value | maximum memory cell value | -| --------- | ------------------------- | -| 0 | 0 | -| 1 | $2^8 - 1$ | -| 2 | $2^{16} - 1$ | -| 3 | $2^{32} - 1$ | -| 4 | $2^{64} - 1$ | -| 5 | $2^{128} - 1$ | -| 6 | $p - 1$ | +### Tags and tagged memory -Note: $p$ describes the modulus of the finite field that the AVM circuit is defined over (i.e. number of points on the BN254 curve). +A `tag` refers to the maximum potential value of a cell of main memory. The following tags are supported: -The purpose of a tag is to inform the VM of the maximum possible length of an operand value that has been loaded from memory. +| tag value | maximum memory cell value | shorthand | +| --------- | ------------------------- | ------------- | +| 0 | 0 | uninitialized | +| 1 | $2^8 - 1$ | `u8` | +| 2 | $2^{16} - 1$ | `u16` | +| 3 | $2^{32} - 1$ | `u32` | +| 4 | $2^{64} - 1$ | `u64` | +| 5 | $2^{128} - 1$ | `u128` | +| 6 | $p - 1$ | `field` | +| 7 | reserved | reserved | -Multiple AVM instructions explicitly operate over range-constrained input parameters (e.g. ADD32). The maximum allowable value for an instruction's input parameters is defined via an _instruction tag_. Two potential scenarios result: +> Note: $p$ describes the modulus of the finite field that the AVM circuit is defined over (i.e. the number of points on the BN254 curve). +> Note: `u32` is used for offsets into the VM's 32-bit addressable main memory -1. A VM instruction's tag value matches the input parameter tag values -2. 
A VM instruction's tag value does not match the input parameter tag values +The purpose of a tag is to inform the VM of the maximum possible length of an operand value that has been loaded from memory. -If case 2 is triggered, an error flag is raised. --- +#### Checking input operand tags -### Writing into memory +Many AVM instructions explicitly operate over range-constrained input parameters (e.g. `ADD<u32>`). The maximum allowable value for an instruction's input parameters is defined via an `in-tag` (instruction/input tag). Two potential scenarios result: -It is required that all VM instructions that write into main memory explicitly define the tag of the output value and ensure the value is appropriately constrained to be consistent with the assigned tag. +1. A VM instruction's tag value matches the input parameter tag values +2. A VM instruction's tag value does _not_ match the input parameter tag values --- +If case 2 is triggered, an error flag is raised and the current call's execution reverts. -### MOV and tag conversions +#### Writing into memory -The MOV instruction copies data from between memory cell, perserving tags. +It is required that all VM instructions that write into main memory explicitly define the tag of the destination value and ensure the value is appropriately constrained to be consistent with the assigned tag. You can see an instruction's "**Tag updates**" in its section of the instruction set document (see [here for `ADD`](./InstructionSet#isa-section-add) and [here for `CAST`](./InstructionSet#isa-section-cast)). -The only VM instruction that can be used to cast between tags is CAST. There are 2 modes to MOV: +#### Standard tagging example: `ADD<u32>` -1. The destination tag describes a maximum value that is _less than_ the source tag +``` -2. The destination tag describes a maximum value that is _greater than or equal to_ the source tag +# ADD<u32> aOffset bOffset dstOffset +assert T[aOffset] == T[bOffset] == u32 // check inputs against in-tag, revert on mismatch +T[dstOffset] = u32 // tag destination with in-tag +M[dstOffset] = M[aOffset] + M[bOffset] // perform the addition -For Case 1, range constraints must be applied to ensure the destination value is consistent with the source value after tag truncations have been applied. +``` - -Case 2 is trivial as no additional consistency checks must be performed between soruce and destination values. +#### `MOV` and tag preservation -The `MOV` instruction copies data from one memory cell to another, preserving tags. In other words, the destination cell's tag will adopt the value of the source: +``` +# MOV srcOffset dstOffset +T[dstOffset] = T[srcOffset] // preserve tag +M[dstOffset] = M[srcOffset] // perform the move +``` ---} +Note that `MOV` does not have an `in-tag` and therefore does not need to make any assertions regarding the source memory cell's type. -### Calldata/returndata and tag conversions +#### `CAST` and tag conversions -All elements in calldata/returndata are implicitly tagged as field elements (i.e. maximum value is $p - 1$). To perform a tag conversion, calldata/returndata must be copied into main memory, followed by an appropriate MOV instruction. +The only VM instruction that can be used to cast between tags is `CAST`. Two potential scenarios result: -## VM threat model, security requirements +1. The destination tag describes a maximum value that is _less than_ the source tag +2. 
The destination tag describes a maximum value that is _greater than or equal to_ the source tag -TODO: move this somewhere else, doesn't quite fit. +For Case 1, range constraints must be applied to ensure the destination value is consistent with the source value after tag truncations have been applied. -An honest Prover must always be able to construct a satsisfiable proof for an AVM program, even if the program throws an error. -This implies constraints produced by the AVM **must** be satisfiable. +Case 2 is trivial as no additional consistency checks must be performed between source and destination values. + +``` +# CAST<u64> srcOffset dstOffset +T[dstOffset] = u64 // tag destination with dst-tag +M[dstOffset] = cast<u64>(M[srcOffset]) // perform cast +``` + +#### Indirect `MOV` and extra tag checks + +A `MOV` instruction may flag its source and/or destination offsets as "indirect". An indirect memory access performs `M[M[offset]]` instead of the standard `M[offset]`. Memory offsets must be `u32`s since main memory is a 32-bit addressable space, and so indirect memory accesses include additional checks. + +Additional checks for a `MOV` with an indirect source offset: +``` +# MOV srcOffset dstOffset // with indirect source +assert T[srcOffset] == u32 // enforce that `M[srcOffset]` is itself a valid memory offset +T[dstOffset] = T[M[srcOffset]] // tag destination to match indirect source tag +M[dstOffset] = M[M[srcOffset]] // perform move from indirect source +``` + +Additional checks for a `MOV` with an indirect destination offset: +``` +# MOV srcOffset dstOffset // with indirect destination
assert T[dstOffset] == u32 // enforce that `M[dstOffset]` is itself a valid memory offset +T[M[dstOffset]] = T[srcOffset] // tag indirect destination to match source tag +M[M[dstOffset]] = M[srcOffset] // perform move to indirect destination +``` + +Additional checks for a `MOV` with both indirect source and destination offsets: +``` +# MOV srcOffset dstOffset // with indirect source and destination +assert T[srcOffset] == T[dstOffset] == u32 // enforce that `M[*Offset]` are valid memory offsets +T[M[dstOffset]] = T[M[srcOffset]] // tag indirect destination to match indirect source tag +M[M[dstOffset]] = M[M[srcOffset]] // perform move to indirect destination +``` + +#### Calldata/returndata and tag conversions + +All elements in calldata/returndata are implicitly tagged as field elements (i.e. maximum value is $p - 1$). To perform a tag conversion, calldata/returndata must be copied into main memory (via [`CALLDATACOPY`](./InstructionSet#isa-section-calldatacopy) or [`RETURN`'s `offset` and `size`](./InstructionSet#isa-section-return)), followed by an appropriate `CAST` instruction. 
+``` +# Copy calldata to memory and cast a word to u64 +CALLDATACOPY cdOffset size offsetA // copy calldata to memory at offsetA +CAST<u64> offsetA dstOffset // cast first copied word to a u64 +``` +This would perform the following: +``` +# CALLDATACOPY cdOffset size offsetA +T[offsetA:offsetA+size] = field // CALLDATACOPY assigns the field tag +M[offsetA:offsetA+size] = calldata[cdOffset:cdOffset+size] // copy calldata to memory +# CAST<u64> offsetA dstOffset +T[dstOffset] = u64 // CAST assigns a new tag +M[dstOffset] = cast<u64>(M[offsetA]) // perform the cast operation +``` \ No newline at end of file diff --git a/yellow-paper/docs/public-vm/tagged-memory.md b/yellow-paper/docs/public-vm/tagged-memory.md deleted file mode 100644 index a72c693da93..00000000000 --- a/yellow-paper/docs/public-vm/tagged-memory.md +++ /dev/null @@ -1,60 +0,0 @@ -# Tagged Memory - An instruction-set centric explanation - -## Explanation of Tagged Memory -Every word in memory will have an associated `type-tag` (unset, u8, u16, u32, u64, u128, field). For memory address `a`, we refer to the corresponding memory word's `type-tag` as `T[a]`. - -Every instruction will be flagged with an `op-type` in bytecode (u8, u16, u32, u64, u128, field). - -If an instruction uses a "source operand" as a memory location (e.g. `z = M[s0] + y`), the VM first retrieves the `type-tag` referenced by the operand (`T[s0]`) and enforces that it matches `op-type`. The VM enforces this for all source operands used for direct memory reads. - -If an instruction uses a "dest operand" as a memory location (e.g. `M[d0] = x + y`), when the VM assigns a word to that memory location, it also assigns the corresponding `type-tag` (`T[d0] = op-type`). The VM performs this tag assignment for all dest operands used for direct memory writes. - -**If an instruction fails any of its operand type-tag-checks, the current call's execution reverts!** - -### `ADD<32>` example -`ADD<32>` is an `ADD` instruction with `op-type` u32. As elaborated on later, an `ADD` performs `M[d0] = M[s0] + M[s1]`. In this case, both `s0` and `s1` are "source operands" used for direct memory reads to retrieve inputs to an addition. So, the VM enforces the `op-type(u32) == T[s0] == T[s1]`. `d0` here is a "dest operand" used for a direct memory write to store the output of the addition. So, the VM tags memory location `d0` with `type-tag` of u32: `T[d0] = op-type(u32)`. - -Here is a summary of what is happening for `ADD<32>`: -``` -assert T[s0] == u32 // enforce that source memory locations' type-tags == op-type -assert T[s1] == u32 -T[d0] = u32 // tag destination memory location as op-type -M[d0] = M[s0] + M[s1] -``` - - -### Type tags and `CASTs` - -`CAST` is different from other instructions in that it will be flagged with an additional `dest-type`. So, a `CAST` will assign `dest-type` (instead of `op-type`) to the memory location specified by its "dest operand" `d0`. `CAST<32, 64>` enforces that `T[s0]` matches u32 (the `op-type`) and assigns `T[d0] = u64` (the `dest-type`). - -Here is a summary of what is happening for a `CAST<32, 64>`: -``` -assert T[s0] == u32 // enforce that source memory location's type-tags == op-type -T[d0] = u64 // tag destination memory location as dest-type -M[d0] = M[s0] -``` - -### Type tags and indirect `MOVs` -A basic `MOV` instruction performs direct memory accesses and operates in the same as a simple `ADD` instruction as outlined above. 
A simple `MOV<64>` would do: -``` -assert T[s0] == u64 // enforce that source memory location's type-tag == op-type -T[d0] = u64 // tag destination memory location with op-type -M[d0] = M[s0] -``` - -Consider a `MOV<64, s0-indirect>`, which treats s0 as an indirect memory pointer to perform `M[d0] = M[M[s0]]`. Here, the VM first needs to enforce that `M[s0]` is a valid memory address (has type u32), and it then needs to perform the standard check that resulting word has type matching `op-type`: -``` -assert T[s0] == u32 // enforce that the direct source memory location contains a valid address (type-tag == u32) -assert T[M[s0]] == u64 // enforce that the indirect source memory location's type-tag == op-type -T[d0] = u64 // tag destination memory location with op-type -M[d0] = M[M[s0]] -``` - -Similarly, a `MOV<64, d0-indirect>` treats d0 as an indirect memory pointer to perform `M[M[d0]] = M[s0]`, and here the VM first needs to enforce that `M[d0]` is a valid memory address (has type u32) before assigning the destination location its type tag: -``` -assert T[s0] == u64 // enforce that source memory location's type-tag == op-type -assert T[d0] == u32 // enforce that the direct destination memory location contains a valid address (type-tag == u32) -T[M[d0]] = u64 // tag indirect destination memory location with op-type -M[M[d0]] = M[s0] -``` - diff --git a/yellow-paper/docs/state/index.md b/yellow-paper/docs/state/index.md new file mode 100644 index 00000000000..edb6a3ca7fd --- /dev/null +++ b/yellow-paper/docs/state/index.md @@ -0,0 +1,13 @@ +--- +title: State +--- + +# State + +Global state in the Aztec Network is represented by a set of Merkle trees: the [Note Hash tree](./note_hash_tree.md), [Nullifier tree](./nullifier_tree.md), and [Public Data tree](./public_data_tree.md) reflect the latest state of the chain. + +Merkle trees are either [append-only](./tree_impls.md#append-only-merkle-trees), for storing immutable data, or [indexed](./tree_impls.md#indexed-merkle-trees), for storing data that requires proofs of non-membership. + +import DocCardList from '@theme/DocCardList'; + +<DocCardList /> diff --git a/yellow-paper/docs/state/note_hash_tree.md b/yellow-paper/docs/state/note_hash_tree.md new file mode 100644 index 00000000000..383f974fca4 --- /dev/null +++ b/yellow-paper/docs/state/note_hash_tree.md @@ -0,0 +1,25 @@ +# Note Hash Tree + +The Note Hash tree is an [append-only Merkle tree](./tree_impls.md#append-only-merkle-trees) that stores siloed note hashes as its elements. Each element in the tree is a 254-bit altBN-254 scalar field element. This tree is part of the global state, and allows proving existence of private notes via Merkle membership proofs. + +Note commitments are immutable once created, since notes cannot be modified. Still, notes can be consumed, which means they can no longer be used. To preserve privacy, a consumed note is not removed from the tree, otherwise it would be possible to link the transaction that created a note with the one that consumed it. Instead, a note is consumed by emitting a deterministic [nullifier](./nullifier_tree.md). + +Contracts emit new note commitments via the `new_commitments` field in the `CircuitPublicInputs`, which are subsequently [siloed](./tree_impls.md#siloing-leaves) per contract by the Kernel circuit. Siloing the commitment ensures that a contract cannot emit a commitment for a note that could be used for a different contract. 
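+
+A sketch of the siloing step in isolation, using the same pseudocode conventions and `SILOED_COMMITMENT` generator index as the unique-commitment example below (the helper name `compute_siloed_commitment` is illustrative only):
+
+```
+fn compute_siloed_commitment(commitment, contract):
+    // illustrative: hash the commitment with the emitting contract's address
+    return hash([contract, commitment], SILOED_COMMITMENT)
+```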
+ +The Kernel circuit also guarantees uniqueness of commitments by further hashing them with a nonce, derived from the transaction identifier and the index of the commitment within the transaction. Uniqueness means that a note with the same contents can be emitted more than once, and each instance can be independently nullified. Without uniqueness, two notes with the same content would yield the same commitment and nullifier, so nullifying one of them would flag the second one as nullified as well. + +The pseudocode for siloing and making a commitment unique is the following, where each `hash` operation is a Pedersen hash with a unique generator index, indicated by the constant in all caps. + +``` +fn compute_unique_siloed_commitment(commitment, contract, transaction): + let siloed_commitment = hash([contract, commitment], SILOED_COMMITMENT) + let index = index_of(commitment, transaction.commitments) + let nonce = hash([transaction.tx_hash, index], COMMITMENT_NONCE) + return hash([nonce, siloed_commitment], UNIQUE_COMMITMENT) +``` + +The unique siloed commitment of a note is included in the [transaction `data`](../transactions/tx-object.md), and then included into the Note Hash tree by the sequencer as the transaction is included in a block. + +The protocol does not enforce any constraints on the commitment emitted by an application. This means that applications are responsible for including a `randomness` field in the note hash to make the commitment _hiding_ in addition to _binding_. If an application does not include randomness, and the note preimage can be guessed by an attacker, it makes the note vulnerable to preimage attacks, since the siloing and uniqueness steps do not provide hiding. + +Furthermore, since there are no constraints on the commitment emitted by an application, an application can emit any value whatsoever as a `new_commitment`, including values that do not map to a note hash. diff --git a/yellow-paper/docs/state/nullifier_tree.md b/yellow-paper/docs/state/nullifier_tree.md new file mode 100644 index 00000000000..02fd697ab0a --- /dev/null +++ b/yellow-paper/docs/state/nullifier_tree.md @@ -0,0 +1,20 @@ +# Nullifier Tree + +The Nullifier tree is an [indexed Merkle tree](./tree_impls.md#indexed-merkle-trees) that stores nullifier values. Each value stored in the tree is a 254-bit altBN-254 scalar field element. This tree is part of the global state, and allows proving non-existence of a nullifier when a note is consumed. + +Nullifiers are asserted to be unique during insertion, by checking that the inserted value is not equal to the value and next-value stored in the prior node in the indexed tree. Any attempt to insert a duplicated value is rejected. + +Contracts emit new nullifiers via the `new_nullifiers` field in the `CircuitPublicInputs`. As with elements in the [Note Hash tree](./note_hash_tree.md), nullifiers are [siloed](./tree_impls.md#siloing-leaves) per contract by the Kernel circuit before being inserted in the tree, which ensures that a contract cannot emit nullifiers that affect other contracts. + +``` +fn compute_siloed_nullifier(nullifier, contract): + return hash([contract, nullifier], OUTER_NULLIFIER) +``` + +Nullifiers are primarily used for privately marking notes as consumed. When a note is consumed in an application, the application computes and emits a deterministic nullifier associated with the note. If a user attempts to consume the same note more than once, the same nullifier will be generated, and will be rejected on insertion by the nullifier tree. 
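+
+For illustration only, an application might derive a note's nullifier along these lines; the helper name `compute_note_nullifier`, the `nullifier_secret_key` input, and the `NOTE_NULLIFIER` generator index are hypothetical, since the protocol does not mandate any particular scheme:
+
+```
+fn compute_note_nullifier(note_commitment, nullifier_secret_key):
+    // hypothetical scheme: bind the nullifier to the note and to a secret only the owner knows
+    return hash([note_commitment, nullifier_secret_key], NOTE_NULLIFIER)
+```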
+ +Nullifiers provide privacy by being computed using a deterministic secret value, such as the owner's siloed nullifier secret key, or a random value stored in an encrypted note. This ensures that, without knowledge of the secret value, it is not possible to calculate the associated nullifier, and thus it is not possible to link a nullifier to its associated note commitment. + +Applications are not constrained by the protocol on how the nullifier for a note is computed. It is the responsibility of the application to guarantee determinism in calculating a nullifier, otherwise the same note could be spent multiple times. + +Furthermore, nullifiers can be emitted by an application just to ensure that an action can be executed only once, such as initializing a value, and are not required to be linked to a note commitment. \ No newline at end of file diff --git a/yellow-paper/docs/state/public_data_tree.md b/yellow-paper/docs/state/public_data_tree.md new file mode 100644 index 00000000000..7af2769b5ec --- /dev/null +++ b/yellow-paper/docs/state/public_data_tree.md @@ -0,0 +1,19 @@ +# Public Data Tree + +The Public Data tree is an [indexed Merkle tree](./tree_impls.md#indexed-merkle-trees) that stores public-state key-value data. Each item stored in the tree is a key-value pair, where both key and value are 254-bit altBN-254 scalar field elements. Items are sorted based on their key, so each indexed tree leaf contains a tuple with the key, the value, the next higher key, and the index in the tree for the next higher key. This tree is part of the global state, and is updated by the sequencer during the execution of public functions. + +The Public Data tree is implemented using an indexed Merkle tree instead of a sparse Merkle tree in order to reduce the tree height. A lower height means shorter membership proofs. + +Keys in the Public Data tree are [siloed](./tree_impls.md#siloing-leaves) using the contract address, to prevent a contract from overwriting public state for another contract. + +``` +fn compute_siloed_public_data_item(key, value, contract): + let siloed_key = hash([contract, key], PUBLIC_DATA_LEAF) + return [siloed_key, value] +``` + +When reading a key from the Public Data tree, the key may or may not be present. If the key is not present, then a non-membership proof is produced, and the value is assumed to be zero. When a key is written to, either a new node is appended to the tree if the key was not present, or its value is overwritten if it was. + +Public functions can read from or write to the Public Data tree by emitting `contract_storage_read` and `contract_storage_update_requests` in the `PublicCircuitPublicInputs`. The Kernel circuit then siloes these requests per contract. + +Contracts can store arbitrary data at a given key, which is always stored as a single field element. Applications are responsible for interpreting this data. Should an application need to store data larger than a single field element, it is responsible for partitioning the data across multiple keys. diff --git a/yellow-paper/docs/state/tree_impls.md b/yellow-paper/docs/state/tree_impls.md new file mode 100644 index 00000000000..0a698eb2de4 --- /dev/null +++ b/yellow-paper/docs/state/tree_impls.md @@ -0,0 +1,25 @@ +# Tree implementations + +Aztec relies on two Merkle tree implementations in the protocol: append-only and indexed Merkle trees. + +## Append-only Merkle trees + +In an append-only Merkle tree, new leaves are inserted in order from left to right. 
Existing leaf values are immutable and cannot be modified. These trees are useful for representing historic data, as new entries are added as new transactions and blocks are processed, and historic data is not altered. + +Append-only trees allow for more efficient syncing than sparse trees, since clients can sync from left to right starting with their last known value. Updates to the tree root from new leaves can be computed just by keeping the rightmost boundary of the tree, and batch insertions can be computed with fewer hashes than in a sparse tree. Append-only trees also provide cheap historic snapshots, as older roots can be computed by completing the Merkle path from a past left subtree with an empty right subtree. + +## Indexed Merkle trees + +Indexed Merkle trees, introduced [here](https://eprint.iacr.org/2021/1263.pdf), allow for more efficient proofs of non-inclusion than sparse Merkle trees. Each leaf in the tree is a tuple with the leaf value, the next higher value in the tree, and the index of the leaf where that value is stored. New nodes are inserted left to right, as in the append-only tree, but existing nodes can be modified to update the next value and its pointer. Indexed Merkle trees behave as a Merkle tree over a sorted linked list. + +Assuming the indexed Merkle tree invariants hold, proving non-membership of a value `x` then requires a membership proof of the node whose value is lower than `x` and whose next higher value is greater than `x`. The cost of this proof is proportional to the height of the tree, which can be set according to the expected number of elements to be stored in the tree. For comparison, a non-membership proof in a sparse tree requires a tree with height proportional to the size of the elements, so when working with 256-bit elements, 256 hashes are required for a proof. + +Refer to [this page](https://docs.aztec.network/concepts/advanced/data_structures/indexed_merkle_tree) for more details on how insertions, updates, and membership proofs are executed on an Indexed Merkle tree. + + + +## Siloing leaves + +For several trees in the protocol we indicate that their leaves are "siloed". This refers to hashing the leaf value with a siloing value before inserting it in the tree. The siloing value is typically an identifier of the contract that produced the value. This allows us to store disjoint "domains" within the same tree, ensuring that a value emitted from one domain cannot affect others. + +To guarantee the siloing of leaf values, siloing is performed by a trusted protocol circuit, such as the kernel or rollup circuits, and not by an application circuit. Siloing is performed by Pedersen hashing the contract address and the value. diff --git a/yellow-paper/docs/transactions/tx-object.md b/yellow-paper/docs/transactions/tx-object.md index ccfa2f7d4a7..95b736766c4 100644 --- a/yellow-paper/docs/transactions/tx-object.md +++ b/yellow-paper/docs/transactions/tx-object.md @@ -45,7 +45,7 @@ Output of the last iteration of the private kernel circuit. Includes _accumulate | nullifierTreeRoot | Field | Root of the nullifier tree at the time of when this information was assembled. | | contractTreeRoot | Field | Root of the contract tree at the time of when this information was assembled. | | l1ToL2MessagesTreeRoot | Field | Root of the L1 to L2 messages tree at the time of when this information was assembled. | -| blocksTreeRoot | Field | Root of the historic blocks tree at the time of when this information was assembled. 
| +| archiveRoot | Field | Root of the archive at the time of when this information was assembled. | | privateKernelVkTreeRoot | Field | Root of the private kernel VK tree at the time of when this information was assembled (future enhancement). | | publicDataTreeRoot | Field | Current public state tree hash. | | globalVariablesHash | Field | Previous globals hash, this value is used to recalculate the block hash. | diff --git a/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js b/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js index 578ac938aaf..19acb3759ed 100644 --- a/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js +++ b/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js @@ -7,8 +7,8 @@ const TOPICS_IN_SECTIONS = [ "Name", "Summary", "Category", "Flags", "Args", "Expression", "Details", "Tag checks", "Tag updates", "Bit-size", ]; -const OP_TYPE_DESCRIPTION = "The [type/size](./Types) to check inputs against and tag the output with."; -const DEST_TYPE_DESCRIPTION = "The [type/size](./Types) to tag the output with when different from `op-type`."; +const IN_TAG_DESCRIPTION = "The [tag/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with."; +const DST_TAG_DESCRIPTION = "The [tag/size](./state-model#tags-and-tagged-memory) to tag the destination with but not to check inputs against."; const INDIRECT_FLAG_DESCRIPTION = "Toggles whether each memory-offset argument is an indirect offset. 0th bit corresponds to 0th offset arg, etc. Indirect offsets result in memory accesses like `M[M[offset]]` instead of the more standard `M[offset]`."; const INSTRUCTION_SET_RAW = [ @@ -18,7 +18,7 @@ const INSTRUCTION_SET_RAW = [ "Category": "arithmetic", "Flags": [ {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION}, - {"name": "op-type", "description": OP_TYPE_DESCRIPTION}, + {"name": "in-tag", "description": IN_TAG_DESCRIPTION}, ], "#memreads": "2", "#memwrites": "1", @@ -30,8 +30,8 @@ const INSTRUCTION_SET_RAW = [ "Expression": "`M[dstOffset] = M[aOffset] + M[bOffset] mod 2^k`", "Summary": "Addition (a + b)", "Details": "", - "Tag checks": "`T[aOffset] == T[bOffset] == op-type`", - "Tag updates": "`T[dstOffset] = op-type`", + "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`", + "Tag updates": "`T[dstOffset] = in-tag`", }, { "id": "sub", @@ -39,7 +39,7 @@ const INSTRUCTION_SET_RAW = [ "Category": "arithmetic", "Flags": [ {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION}, - {"name": "op-type", "description": OP_TYPE_DESCRIPTION}, + {"name": "in-tag", "description": IN_TAG_DESCRIPTION}, ], "#memreads": "2", "#memwrites": "1", @@ -51,8 +51,8 @@ const INSTRUCTION_SET_RAW = [ "Expression": "`M[dstOffset] = M[aOffset] - M[bOffset] mod 2^k`", "Summary": "Subtraction (a - b)", "Details": "", - "Tag checks": "`T[aOffset] == T[bOffset] == op-type`", - "Tag updates": "`T[dstOffset] = op-type`", + "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`", + "Tag updates": "`T[dstOffset] = in-tag`", }, { "id": "div", @@ -60,7 +60,7 @@ const INSTRUCTION_SET_RAW = [ "Category": "arithmetic", "Flags": [ {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION}, - {"name": "op-type", "description": OP_TYPE_DESCRIPTION}, + {"name": "in-tag", "description": IN_TAG_DESCRIPTION}, ], "#memreads": "2", "#memwrites": "1", @@ -72,8 +72,8 @@ const INSTRUCTION_SET_RAW = [ "Expression": "`M[dstOffset] = M[aOffset] / M[bOffset]`", "Summary": "Unsigned division (a / b)", "Details": "", - "Tag checks": "`T[aOffset] == 
T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "eq",
@@ -81,7 +81,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "conditional",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -93,8 +93,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] == M[bOffset] ? 1 : 0`",
         "Summary": "Equality check (a == b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "lt",
@@ -102,7 +102,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "conditional",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -114,8 +114,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] < M[bOffset] ? 1 : 0`",
         "Summary": "Less-than check (a < b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "lte",
@@ -123,7 +123,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "conditional",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -135,8 +135,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] <= M[bOffset] ? 1 : 0`",
         "Summary": "Less-than-or-equals check (a <= b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "and",
@@ -144,7 +144,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "bitwise",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -156,8 +156,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] AND M[bOffset]`",
         "Summary": "Bitwise AND (a & b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "or",
@@ -165,7 +165,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "bitwise",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -177,8 +177,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] OR M[bOffset]`",
         "Summary": "Bitwise OR (a | b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "xor",
@@ -186,7 +186,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "bitwise",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -198,8 +198,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] XOR M[bOffset]`",
         "Summary": "Bitwise XOR (a ^ b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "not",
@@ -207,7 +207,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "bitwise",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "1",
         "#memwrites": "1",
@@ -218,8 +218,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = NOT M[aOffset]`",
         "Summary": "Bitwise NOT (inversion)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "shl",
@@ -227,7 +227,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "bitwise",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -239,8 +239,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] << M[bOffset]`",
         "Summary": "Bitwise leftward shift (a << b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "shr",
@@ -248,7 +248,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "bitwise",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": OP_TYPE_DESCRIPTION},
+            {"name": "in-tag", "description": IN_TAG_DESCRIPTION},
         ],
         "#memreads": "2",
         "#memwrites": "1",
@@ -260,8 +260,8 @@ const INSTRUCTION_SET_RAW = [
         "Expression": "`M[dstOffset] = M[aOffset] >> M[bOffset]`",
         "Summary": "Bitwise rightward shift (a >> b)",
         "Details": "",
-        "Tag checks": "`T[aOffset] == T[bOffset] == op-type`",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag checks": "`T[aOffset] == T[bOffset] == in-tag`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "cast",
@@ -269,7 +269,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "types",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "dest-type", "description": DEST_TYPE_DESCRIPTION},
+            {"name": "dst-tag", "description": DST_TAG_DESCRIPTION},
         ],
         "#memreads": "1",
         "#memwrites": "1",
@@ -277,11 +277,11 @@ const INSTRUCTION_SET_RAW = [
             {"name": "aOffset", "description": "memory offset of word to cast"},
             {"name": "dstOffset", "description": "memory offset specifying where to store operation's result"},
         ],
-        "Expression": "`M[dstOffset] = cast<dest-type>(M[aOffset])`",
+        "Expression": "`M[dstOffset] = cast<dst-tag>(M[aOffset])`",
         "Summary": "Type cast",
-        "Details": "Cast a word in memory based on the `dest-type` specified in the bytecode. Truncates when casting to a smaller type, left-zero-pads when casting to a larger type.",
+        "Details": "Cast a word in memory based on the `dst-tag` specified in the bytecode. Truncates (`M[dstOffset] = M[aOffset] mod 2^dstsize`) when casting to a smaller type, left-zero-pads when casting to a larger type. See [here](./state-model#cast-and-tag-conversions) for more details.",
         "Tag checks": "",
-        "Tag updates": "`T[dstOffset] = dest-type`",
+        "Tag updates": "`T[dstOffset] = dst-tag`",
     },
     {
         "id": "set",
@@ -289,7 +289,7 @@ const INSTRUCTION_SET_RAW = [
         "Category": "memory",
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
-            {"name": "op-type", "description": "The [type/size](./Types) to check inputs against and tag the output with. `field` type is NOT supported for SET."},
+            {"name": "in-tag", "description": "The [type/size](./state-model#tags-and-tagged-memory) to check inputs against and tag the destination with. `field` type is NOT supported for SET."},
         ],
         "#memreads": "0",
         "#memwrites": "1",
@@ -299,9 +299,9 @@ const INSTRUCTION_SET_RAW = [
         ],
         "Expression": "`M[dstOffset] = const`",
         "Summary": "Set a memory word from a constant in the bytecode.",
-        "Details": "Set memory word at `dstOffset` to `const`'s immediate value. `const`'s bit-size (N) can be 8, 16, 32, 64, or 128 based on `op-type`. It _cannot be 254 (`field` type)_!",
+        "Details": "Set memory word at `dstOffset` to `const`'s immediate value. `const`'s bit-size (N) can be 8, 16, 32, 64, or 128 based on `in-tag`. It _cannot be 254 (`field` type)_!",
         "Tag checks": "",
-        "Tag updates": "`T[dstOffset] = op-type`",
+        "Tag updates": "`T[dstOffset] = in-tag`",
     },
     {
         "id": "mov",
@@ -354,14 +354,14 @@
         "#memwrites": "`s1`",
         "Args": [
             {"name": "cdOffset", "description": "offset into calldata to copy from"},
-            {"name": "size", "description": "number of words to copy", "mode": "immediate", "type": "u24"},
+            {"name": "copySize", "description": "number of words to copy", "mode": "immediate", "type": "u32"},
             {"name": "dstOffset", "description": "memory offset specifying where to copy the first word to"},
         ],
-        "Expression": "`M[dstOffset:dstOffset+size] = calldata[cdOffset:cdOffset+size]`",
+        "Expression": "`M[dstOffset:dstOffset+copySize] = calldata[cdOffset:cdOffset+copySize]`",
         "Summary": "Copy calldata into memory.",
         "Details": "Calldata is read-only and cannot be directly operated on by other instructions. This instruction moves words from calldata into memory so they can be operated on normally.",
         "Tag checks": "",
-        "Tag updates": "`T[dstOffset:dstOffset+size] = field`",
+        "Tag updates": "`T[dstOffset:dstOffset+copySize] = field`",
     },
     {
         "id": "sload",
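A similar hypothetical sketch of CALLDATACOPY with the renamed `copySize` operand, under the same illustrative memory model as above (not spec or repo code):

// Calldata is read-only; this moves words into memory and tags the
// destination range, matching `T[dstOffset:dstOffset+copySize] = field`.
function calldatacopy(calldata, mem, tags, cdOffset, copySize, dstOffset) {
    for (let i = 0; i < copySize; i++) {
        mem[dstOffset + i] = calldata[cdOffset + i];
        tags[dstOffset + i] = 'field';
    }
}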
loc : PC`", @@ -500,10 +500,10 @@ const INSTRUCTION_SET_RAW = [ "#memreads": "`s1`", "#memwrites": "0", "Args": [ - {"name": "offset", "description": "memory offset of first word to return"}, - {"name": "size", "description": "number of words to return", "mode": "immediate", "type": "u24"}, + {"name": "retOffset", "description": "memory offset of first word to return"}, + {"name": "retSize", "description": "number of words to return", "mode": "immediate", "type": "u32"}, ], - "Expression": "`return(M[offset:offset+size])`", + "Expression": "`return(M[retOffset:retOffset+retSize])`", "Summary": "Halt execution with `success`, optionally returning some data.", "Details": "Return control flow to the calling context/contract.", "Tag checks": "", @@ -519,10 +519,10 @@ const INSTRUCTION_SET_RAW = [ "#memreads": "`s1`", "#memwrites": "0", "Args": [ - {"name": "offset", "description": "memory offset of first word to return"}, - {"name": "size", "description": "number of words to return", "mode": "immediate", "type": "u24"}, + {"name": "retOffset", "description": "memory offset of first word to return"}, + {"name": "retSize", "description": "number of words to return", "mode": "immediate", "type": "u32"}, ], - "Expression": "`revert(M[offset:offset+size])`", + "Expression": "`revert(M[retOffset:retOffset+retSize])`", "Summary": "Halt execution with `failure`, reverting state changes and optionally returning some data.", "Details": "Return control flow to the calling context/contract.", "Tag checks": "", @@ -535,21 +535,20 @@ const INSTRUCTION_SET_RAW = [ "Flags": [ {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION}, ], - "#memreads": "5", + "#memreads": "7", "#memwrites": "`1+retSize`", "Args": [ - {"name": "l1GasOffset", "description": "amount of L1 gas to provide to the callee"}, - {"name": "l2GasOffset", "description": "amount of L2 gas to provide to the callee"}, + {"name": "gasOffset", "description": "offset to two words containing `{l1Gas, l2Gas}`: amount of L1 and L2 gas to provide to the callee"}, {"name": "addrOffset", "description": "address of the contract to call"}, {"name": "argsOffset", "description": "memory offset to args (will become the callee's calldata)"}, - {"name": "argsSize", "description": "number of words to pass via callee's calldata", "mode": "immediate", "type": "u24"}, + {"name": "argsSize", "description": "number of words to pass via callee's calldata", "mode": "immediate", "type": "u32"}, {"name": "retOffset", "description": "destination memory offset specifying where to store the data returned from the callee"}, - {"name": "retSize", "description": "number of words to copy from data returned by callee", "mode": "immediate", "type": "u24"}, + {"name": "retSize", "description": "number of words to copy from data returned by callee", "mode": "immediate", "type": "u32"}, {"name": "successOffset", "description": "destination memory offset specifying where to store the call's success (0: failure, 1: success)", "type": "u8"}, ], "Expression":` M[successOffset] = call( - M[l1GasOffset], M[l2GasOffset], M[addrOffset], + M[gasOffset], M[gasOffset+1], M[addrOffset], M[argsOffset], M[argsSize], M[retOffset], M[retSize]) `, @@ -557,7 +556,7 @@ M[successOffset] = call( "Details": `Creates a new CallContext, triggers execution of the corresponding contract code, and then resumes execution in the current CallContext. A non-existent contract or one with no code will return success. 
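The jump pair can be sketched the same way; `state`, `jump`, and `jumpi` are illustrative names (not spec or repo code), and the PC is assumed to advance past the instruction elsewhere in the interpreter loop:

function jump(state, loc) {
    state.pc = loc; // `PC = loc`, with `loc` now a u32 immediate
}

function jumpi(state, mem, loc, condOffset) {
    // `PC = M[condOffset] > 0 ? loc : PC` (falls through when zero)
    if (mem[condOffset] > 0n) state.pc = loc;
}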
@@ -535,21 +535,20 @@
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
         ],
-        "#memreads": "5",
+        "#memreads": "7",
         "#memwrites": "`1+retSize`",
         "Args": [
-            {"name": "l1GasOffset", "description": "amount of L1 gas to provide to the callee"},
-            {"name": "l2GasOffset", "description": "amount of L2 gas to provide to the callee"},
+            {"name": "gasOffset", "description": "offset to two words containing `{l1Gas, l2Gas}`: amount of L1 and L2 gas to provide to the callee"},
             {"name": "addrOffset", "description": "address of the contract to call"},
             {"name": "argsOffset", "description": "memory offset to args (will become the callee's calldata)"},
-            {"name": "argsSize", "description": "number of words to pass via callee's calldata", "mode": "immediate", "type": "u24"},
+            {"name": "argsSize", "description": "number of words to pass via callee's calldata", "mode": "immediate", "type": "u32"},
             {"name": "retOffset", "description": "destination memory offset specifying where to store the data returned from the callee"},
-            {"name": "retSize", "description": "number of words to copy from data returned by callee", "mode": "immediate", "type": "u24"},
+            {"name": "retSize", "description": "number of words to copy from data returned by callee", "mode": "immediate", "type": "u32"},
             {"name": "successOffset", "description": "destination memory offset specifying where to store the call's success (0: failure, 1: success)", "type": "u8"},
         ],
         "Expression": `
 M[successOffset] = call(
-    M[l1GasOffset], M[l2GasOffset], M[addrOffset],
+    M[gasOffset], M[gasOffset+1], M[addrOffset],
     M[argsOffset], M[argsSize],
     M[retOffset], M[retSize])
 `,
@@ -557,7 +556,7 @@ M[successOffset] = call(
         "Details": `Creates a new CallContext, triggers execution of the corresponding contract code, and then resumes execution in the current CallContext.
 A non-existent contract or one with no code will return success.
 Nested call has an incremented \`CallContext.calldepth\`.`,
-        "Tag checks": "`T[l1GasOffset] == T[l2GasOffset] == u32`",
+        "Tag checks": "`T[gasOffset] == T[gasOffset+1] == u32`",
         "Tag updates": `
 T[successOffset] = u8
 T[retOffset:retOffset+retSize] = field
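The packed-gas change is the substantive one here: instead of two separate operands, the callee's L1 and L2 gas now sit in adjacent memory words, which shortens the operand list at the cost of requiring the two values to be adjacent. A hypothetical sketch of the resulting read and tag checks (illustrative names, not spec or repo code):

function readCallGas(mem, tags, gasOffset) {
    // Tag checks: `T[gasOffset] == T[gasOffset+1] == u32`.
    if (tags[gasOffset] !== 'u32' || tags[gasOffset + 1] !== 'u32') {
        throw new Error('tag mismatch: gas words must be u32');
    }
    return { l1Gas: mem[gasOffset], l2Gas: mem[gasOffset + 1] };
}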
@@ -570,27 +569,26 @@
         "Flags": [
             {"name": "indirect", "description": INDIRECT_FLAG_DESCRIPTION},
         ],
-        "#memreads": "5",
+        "#memreads": "7",
         "#memwrites": "`1+retSize`",
         "Args": [
-            {"name": "l1GasOffset", "description": "amount of L1 gas to provide to the callee"},
-            {"name": "l2GasOffset", "description": "amount of L2 gas to provide to the callee"},
+            {"name": "gasOffset", "description": "offset to two words containing `{l1Gas, l2Gas}`: amount of L1 and L2 gas to provide to the callee"},
             {"name": "addrOffset", "description": "address of the contract to call"},
             {"name": "argsOffset", "description": "memory offset to args (will become the callee's calldata)"},
-            {"name": "argsSize", "description": "number of words to pass via callee's calldata", "mode": "immediate", "type": "u24"},
+            {"name": "argsSize", "description": "number of words to pass via callee's calldata", "mode": "immediate", "type": "u32"},
             {"name": "retOffset", "description": "destination memory offset specifying where to store the data returned from the callee"},
-            {"name": "retSize", "description": "number of words to copy from data returned by callee", "mode": "immediate", "type": "u24"},
+            {"name": "retSize", "description": "number of words to copy from data returned by callee", "mode": "immediate", "type": "u32"},
             {"name": "successOffset", "description": "destination memory offset specifying where to store the call's success (0: failure, 1: success)", "type": "u8"},
         ],
         "Expression": `
 M[successOffset] = staticcall(
-    M[l1GasOffset], M[l2GasOffset], M[addrOffset],
+    M[gasOffset], M[gasOffset+1], M[addrOffset],
     M[argsOffset], M[argsSize],
     M[retOffset], M[retSize])
 `,
         "Summary": "Call into another contract, disallowing persistent state modifications.",
         "Details": "Same as `CALL`, but the callee cannot modify persistent state. Disallowed instructions are `SSTORE`, `ULOG`, `CALL`.",
-        "Tag checks": "`T[l1GasOffset] == T[l2GasOffset] == u32`",
+        "Tag checks": "`T[gasOffset] == T[gasOffset+1] == u32`",
         "Tag updates": `
 T[successOffset] = u8
 T[retOffset:retOffset+retSize] = field
@@ -606,10 +604,10 @@
         "#memreads": "`s1`",
         "#memwrites": "0",
         "Args": [
-            {"name": "offset", "description": "memory offset of the data to log"},
-            {"name": "size", "description": "number of words to log", "mode": "immediate", "type": "u24"},
+            {"name": "logOffset", "description": "memory offset of the data to log"},
+            {"name": "logSize", "description": "number of words to log", "mode": "immediate", "type": "u32"},
         ],
-        "Expression": "`ulog(M[offset:offset+size])`",
+        "Expression": "`ulog(M[logOffset:logOffset+logSize])`",
         "Summary": "Emit an unencrypted log with data from the `field` memory page",
         "Details": "",
         "Tag checks": "",
@@ -887,7 +885,7 @@ T[retOffset:retOffset+retSize] = field
             {"name": "blockNumOffset", "description": "memory offset of the block number input"},
             {"name": "dstOffset", "description": "memory offset specifying where to store operation's result"},
         ],
-        "Expression": "`M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].blocks_tree_root`",
+        "Expression": "`M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].archive_root`",
         "Summary": "Get the historical blocks tree root as of the specified block number.",
         "Details": "",
         "Tag checks": "",
diff --git a/yellow-paper/src/preprocess/InstructionSet/InstructionSize.js b/yellow-paper/src/preprocess/InstructionSet/InstructionSize.js
index 0cefffcf327..f6cac342791 100644
--- a/yellow-paper/src/preprocess/InstructionSet/InstructionSize.js
+++ b/yellow-paper/src/preprocess/InstructionSet/InstructionSize.js
@@ -1,7 +1,8 @@
 const OPCODE_SIZE = 8;
 const FLAG_SIZE = 8;
+const RESERVED_SIZE = 8;

-const DEFAULT_OPERAND_SIZE = 24; // for direct/indirect memory offsets
+const DEFAULT_OPERAND_SIZE = 32; // for direct/indirect memory offsets

 function argSize(arg) {
     if (arg['mode'] && arg['mode'] == 'immediate') {
@@ -31,7 +32,7 @@ function toOpcode(index) {
  * 1 byte for dest-type
  */
 function instructionSize(instr) {
-    let size = OPCODE_SIZE;
+    let size = OPCODE_SIZE + RESERVED_SIZE;
     let numUntypedImmediates = 0;
     for (let arg of instr['Args']) {
         const aSize = argSize(arg);
@@ -59,6 +60,9 @@ function instructionBitFormat(instr, index) {
             'code': toOpcode(index),
             'size': OPCODE_SIZE,
         },
+        'Reserved': {
+            'size': RESERVED_SIZE,
+        },
         'Args': [],
         'Flags': [],
     };
diff --git a/yellow-paper/src/preprocess/InstructionSet/genBitFormats.js b/yellow-paper/src/preprocess/InstructionSet/genBitFormats.js
index 99bdc4c34f8..c406f70a85e 100644
--- a/yellow-paper/src/preprocess/InstructionSet/genBitFormats.js
+++ b/yellow-paper/src/preprocess/InstructionSet/genBitFormats.js
@@ -10,6 +10,7 @@ function run() {
         const bitFormat = instructionBitFormat(instr, i);
         formats.push(bitFormat);
     }
+    console.log(`Writing ${formats.length} bit formats to InstructionBitFormats.json`);
     fs.writeFileSync('./InstructionBitFormats.json', JSON.stringify(formats));
 }
 run();
\ No newline at end of file
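A worked example of what the new reserved byte and 32-bit operands do to encoded instruction sizes, assuming an ADD-shaped instruction (two flags, three memory offsets); the arithmetic mirrors the constants in the hunks above but the example itself is illustrative:

// opcode byte + reserved byte + one byte per flag + 32 bits per operand
const OPCODE_SIZE = 8, RESERVED_SIZE = 8, FLAG_SIZE = 8;
const DEFAULT_OPERAND_SIZE = 32;

const addBits =
    OPCODE_SIZE + RESERVED_SIZE +  // 16
    2 * FLAG_SIZE +                // +16 (indirect, in-tag)
    3 * DEFAULT_OPERAND_SIZE;      // +96 (aOffset, bOffset, dstOffset)
console.log(addBits / 8); // 16 bytes (was 12 with no reserved byte and u24 operands)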
diff --git a/yellow-paper/src/preprocess/InstructionSet/InstructionSetMarkdownGen.js b/yellow-paper/src/preprocess/InstructionSet/genMarkdown.js
similarity index 96%
rename from yellow-paper/src/preprocess/InstructionSet/InstructionSetMarkdownGen.js
rename to yellow-paper/src/preprocess/InstructionSet/genMarkdown.js
index b9a689749cd..e26940ef03f 100644
--- a/yellow-paper/src/preprocess/InstructionSet/InstructionSetMarkdownGen.js
+++ b/yellow-paper/src/preprocess/InstructionSet/genMarkdown.js
@@ -19,7 +19,7 @@ function stripBraces(str) {
 function instructionSetPreface() {
     let preface = "[comment]: # (THIS IS A GENERATED FILE! DO NOT EDIT!)\n";
     preface += "[comment]: # (Generated via `yarn preprocess`)\n\n";
-    preface += "[comment]: # (Generated by InstructionSetMarkdownGen.tsx and InstructionSet.js)\n\n";
+    preface += "[comment]: # (Generated by genMarkdown.js, InstructionSet.js, InstructionSize.js)\n\n";
     preface += "import Markdown from 'react-markdown'\n";
     preface += "import CodeBlock from '@theme/CodeBlock'\n\n";
     return preface;
@@ -106,7 +106,7 @@ function markdownInstructionSetSection(pathToGenDir) {
         }
         const bitFormatPath = `./images/bit-formats/${name.replace(/`/g, '')}.png`;
         if (fs.existsSync(`${pathToGenDir}/${bitFormatPath}`)) {
-            subsection += `\n![](${bitFormatPath})`;
+            subsection += `\n[![](${bitFormatPath})](${bitFormatPath})`;
         }
         markdown += `\n${subsection}\n`;
     }
diff --git a/yellow-paper/src/preprocess/index.js b/yellow-paper/src/preprocess/index.js
index 71c4227df06..fe2167bdf09 100644
--- a/yellow-paper/src/preprocess/index.js
+++ b/yellow-paper/src/preprocess/index.js
@@ -1,4 +1,4 @@
-const {generateInstructionSet} = require('./InstructionSet/InstructionSetMarkdownGen');
+const {generateInstructionSet} = require('./InstructionSet/genMarkdown');

 async function run() {
     await generateInstructionSet();