diff --git a/.circleci/config.yml b/.circleci/config.yml index fea313dd0c5..794d5a090b7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -346,6 +346,17 @@ jobs: name: Build command: build yarn-project | add_timestamps + yarn-project-prod: + machine: + image: ubuntu-2204:2023.07.2 + resource_class: large + steps: + - *checkout + - *setup_env + - run: + name: Build + command: build yarn-project-prod | add_timestamps + yarn-project-formatting: machine: image: ubuntu-2204:2023.07.2 @@ -368,7 +379,7 @@ jobs: name: Test command: cond_spot_run_container yarn-project 64 test | add_timestamps - aztec-sandbox-x86_64: + aztec-sandbox: machine: image: ubuntu-2204:2023.07.2 resource_class: large @@ -379,22 +390,7 @@ jobs: name: "Build and test" command: build aztec-sandbox - aztec-sandbox-arm64: - machine: - image: ubuntu-2204:2023.07.2 - resource_class: arm.large - steps: - - *checkout - - *setup_env - - run: - name: "Build and test" - # We need to force not to use docker buildkit because for some reason on arm only, it ends up making a call - # out to eu-west2 despite the image being locally tagged, resulting in unauthorized 401. Weird docker bug? - command: | - echo "export DOCKER_BUILDKIT=" > $BASH_ENV - build aztec-sandbox - - cli-x86_64: + cli: machine: image: ubuntu-2204:2023.07.2 resource_class: large @@ -405,21 +401,6 @@ jobs: name: "Build and test" command: build cli - cli-arm64: - machine: - image: ubuntu-2204:2023.07.2 - resource_class: arm.large - steps: - - *checkout - - *setup_env - - run: - name: "Build and test" - # We need to force not to use docker buildkit because for some reason on arm only, it ends up making a call - # out to eu-west2 despite the image being locally tagged, resulting in unauthorized 401. Weird docker bug? - command: | - echo "export DOCKER_BUILDKIT=" > $BASH_ENV - build cli - mainnet-fork: machine: image: ubuntu-2204:2023.07.2 @@ -442,22 +423,6 @@ jobs: name: "Build and test" command: build aztec-faucet | add_timestamps - ecr-manifest: - machine: - image: ubuntu-2204:2023.07.2 - resource_class: large - steps: - - *checkout - - *setup_env - - run: - name: "Create ECR manifest" - command: | - create_ecr_manifest aztec-sandbox x86_64,arm64 - create_ecr_manifest cli x86_64,arm64 - create_ecr_manifest aztec-faucet x86_64 - create_ecr_manifest mainnet-fork x86_64 - create_ecr_manifest l1-contracts x86_64 - boxes-blank-react: machine: image: ubuntu-2204:2023.07.2 @@ -939,8 +904,8 @@ jobs: name: "Assemble benchmark summary from uploaded logs" command: ./scripts/ci/assemble_e2e_benchmark.sh - # Deploy jobs. - deploy-mainnet-fork: + # Release jobs. 
+ release-npm: machine: image: ubuntu-2204:2023.07.2 resource_class: medium @@ -948,12 +913,17 @@ jobs: - *checkout - *setup_env - run: - name: "Deploy mainnet fork" + name: "yarn-project" command: | - should_deploy || exit 0 - deploy mainnet-fork + should_release || exit 0 + yarn-project/deploy_npm.sh latest + - run: + name: "l1-contracts" + command: | + should_release || exit 0 + deploy_npm l1-contracts - deploy-contracts: + release-dockerhub: machine: image: ubuntu-2204:2023.07.2 resource_class: medium @@ -961,13 +931,17 @@ jobs: - *checkout - *setup_env - run: - name: "Deploy L1 contracts to mainnet fork" - working_directory: l1-contracts + name: "Release to dockerhub" command: | - should_deploy || exit 0 - ./scripts/ci_deploy_contracts.sh + should_release || exit 0 + deploy_dockerhub noir + deploy_dockerhub aztec-sandbox + deploy_dockerhub cli + deploy_dockerhub aztec-faucet + deploy_dockerhub mainnet-fork - deploy-npm: + # Deploy jobs. + deploy-mainnet-fork: machine: image: ubuntu-2204:2023.07.2 resource_class: medium @@ -975,12 +949,12 @@ jobs: - *checkout - *setup_env - run: - name: "yarn-project" + name: "Deploy mainnet fork" command: | should_deploy || exit 0 - yarn-project/deploy_npm.sh latest + deploy mainnet-fork - deploy-dockerhub: + deploy-contracts: machine: image: ubuntu-2204:2023.07.2 resource_class: medium @@ -988,15 +962,11 @@ jobs: - *checkout - *setup_env - run: - name: "Deploy to dockerhub" + name: "Deploy L1 contracts to mainnet fork" + working_directory: l1-contracts command: | should_deploy || exit 0 - deploy_dockerhub noir x86_64,arm64 - deploy_dockerhub aztec-sandbox x86_64,arm64 - deploy_dockerhub cli x86_64,arm64 - deploy_dockerhub aztec-faucet x86_64 - deploy_dockerhub mainnet-fork x86_64 - deploy_dockerhub l1-contracts x86_64 + ./scripts/ci_deploy_contracts.sh deploy-devnet: machine: @@ -1038,6 +1008,11 @@ defaults_yarn_project: &defaults_yarn_project - yarn-project <<: *defaults +defaults_yarn_project_prod: &defaults_yarn_project_prod + requires: + - yarn-project-prod + <<: *defaults + defaults_deploy: &defaults_deploy requires: - end @@ -1124,42 +1099,37 @@ workflows: requires: - yarn-project-base <<: *defaults + - yarn-project-prod: *defaults_yarn_project - yarn-project-formatting: *defaults_yarn_project - yarn-project-tests: *defaults_yarn_project - end-to-end: *defaults_yarn_project - build-docs: *defaults_yarn_project - - aztec-sandbox-x86_64: *defaults_yarn_project - - aztec-sandbox-arm64: *defaults_yarn_project - - cli-x86_64: *defaults_yarn_project - - cli-arm64: *defaults_yarn_project - - aztec-faucet: *defaults_yarn_project - - ecr-manifest: - requires: - - aztec-sandbox-x86_64 - - aztec-sandbox-arm64 - - cli-x86_64 - - cli-arm64 - <<: *defaults + + # Artifacts + - aztec-sandbox: *defaults_yarn_project_prod + - cli: *defaults_yarn_project_prod + - aztec-faucet: *defaults_yarn_project_prod # Boxes. - boxes-blank-react: requires: - - aztec-sandbox-x86_64 + - aztec-sandbox <<: *defaults - boxes-blank: requires: - - aztec-sandbox-x86_64 + - aztec-sandbox <<: *defaults - boxes-token: requires: - - aztec-sandbox-x86_64 + - aztec-sandbox <<: *defaults # End to end tests. - e2e-join: requires: - end-to-end - - ecr-manifest + - aztec-sandbox + - cli <<: *defaults - e2e-2-pxes: *e2e_test - e2e-deploy-contract: *e2e_test @@ -1242,12 +1212,14 @@ workflows: - bench-process-history <<: *defaults - # Production deployment - - deploy-dockerhub: *defaults_deploy - - deploy-npm: *defaults_deploy + # Production releases. 
+ - release-dockerhub: *defaults_deploy + - release-npm: *defaults_deploy + + # Production deployment. - deploy-mainnet-fork: requires: - - deploy-dockerhub + - release-dockerhub <<: *defaults_deploy - deploy-contracts: requires: @@ -1257,4 +1229,3 @@ workflows: requires: - deploy-contracts <<: *defaults_deploy - diff --git a/.nvmrc b/.nvmrc index 39e593ebeee..eb800ed459a 100644 --- a/.nvmrc +++ b/.nvmrc @@ -1 +1 @@ -v18.8.0 +v18.19.0 diff --git a/.release-please-manifest.json b/.release-please-manifest.json index bf4932462d0..a655b1913f8 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,5 +1,5 @@ { - ".": "0.16.1", - "barretenberg": "0.16.1", - "barretenberg/ts": "0.16.1" + ".": "0.16.7", + "barretenberg": "0.16.7", + "barretenberg/ts": "0.16.7" } diff --git a/CHANGELOG.md b/CHANGELOG.md index bcf97225367..956258bb1b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,179 @@ # Changelog +## [0.16.7](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.6...aztec-packages-v0.16.7) (2023-12-06) + + +### Features + +* Encapsulated Goblin ([#3524](https://github.com/AztecProtocol/aztec-packages/issues/3524)) ([2f08423](https://github.com/AztecProtocol/aztec-packages/commit/2f08423e37942f991634fe6c45de52feb1f159cf)) + + +### Bug Fixes + +* Extract whole archive instead of subset ([#3604](https://github.com/AztecProtocol/aztec-packages/issues/3604)) ([cb000d8](https://github.com/AztecProtocol/aztec-packages/commit/cb000d828dcea0ec5025bceadd322b1d260c0111)) + + +### Documentation + +* **yellow-paper:** Note hash, nullifier, and public data trees ([#3518](https://github.com/AztecProtocol/aztec-packages/issues/3518)) ([0e2db8b](https://github.com/AztecProtocol/aztec-packages/commit/0e2db8b0a819dfe44dd5c76ff89aaa1f403d2071)) + +## [0.16.6](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.5...aztec-packages-v0.16.6) (2023-12-06) + + +### Bug Fixes + +* **pxe:** Initialise aztecjs on pxe startup ([#3601](https://github.com/AztecProtocol/aztec-packages/issues/3601)) ([ceb2ed2](https://github.com/AztecProtocol/aztec-packages/commit/ceb2ed2618398c6af56e69ec0a9f58b808547f30)) +* Remove api_prefix local ([#3599](https://github.com/AztecProtocol/aztec-packages/issues/3599)) ([0d8dd8d](https://github.com/AztecProtocol/aztec-packages/commit/0d8dd8d14fa002b4dadcd7ea70e01c5b263edaee)) + + +### Miscellaneous + +* **yellow_paper:** Fixes to my work on public private messages ([#3507](https://github.com/AztecProtocol/aztec-packages/issues/3507)) ([33a4f63](https://github.com/AztecProtocol/aztec-packages/commit/33a4f63dc8004d144d891fb8016d85471c64e880)) + +## [0.16.5](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.4...aztec-packages-v0.16.5) (2023-12-06) + + +### Features + +* Add EFS file storage to devnet nodes ([#3584](https://github.com/AztecProtocol/aztec-packages/issues/3584)) ([5b590eb](https://github.com/AztecProtocol/aztec-packages/commit/5b590eb06fab7ecfcd62aa78a04e035dc8db6b41)) + + +### Bug Fixes + +* **ci:** Aztec node devnet healthchecks ([#3598](https://github.com/AztecProtocol/aztec-packages/issues/3598)) ([1a9d742](https://github.com/AztecProtocol/aztec-packages/commit/1a9d742cb21ea71df33eb8931b0faecc96e84508)) +* **ci:** Count for EFS AZ2 ([#3597](https://github.com/AztecProtocol/aztec-packages/issues/3597)) ([d427bca](https://github.com/AztecProtocol/aztec-packages/commit/d427bca1c53aacc499f0895bb172f88d96e9347e)) +* **ci:** L1-contracts npm release 
([#3596](https://github.com/AztecProtocol/aztec-packages/issues/3596)) ([008df50](https://github.com/AztecProtocol/aztec-packages/commit/008df5018e8f924ac93ad5d9d712727c51952c54)) +* **ci:** Node health-check + contract address env vars ([#3578](https://github.com/AztecProtocol/aztec-packages/issues/3578)) ([fffc700](https://github.com/AztecProtocol/aztec-packages/commit/fffc7007cf5a5fb5e721c63d4abff5184d40c9c0)) + + +### Miscellaneous + +* Make noir-circuit independent of aztec-nr ([#3591](https://github.com/AztecProtocol/aztec-packages/issues/3591)) ([3013354](https://github.com/AztecProtocol/aztec-packages/commit/301335479f45837e61e1b434566dff98a0867a37)) +* Remove foundation and types deps from boxes ([#3389](https://github.com/AztecProtocol/aztec-packages/issues/3389)) ([eade352](https://github.com/AztecProtocol/aztec-packages/commit/eade352a56b2365b5213962733735e45a6d46fb0)) +* Renaming blockstree to archive ([#3569](https://github.com/AztecProtocol/aztec-packages/issues/3569)) ([6c200e9](https://github.com/AztecProtocol/aztec-packages/commit/6c200e932b6a4bb218059e7b9f92f97c70aa8195)) +* Trivial change roundup ([#3556](https://github.com/AztecProtocol/aztec-packages/issues/3556)) ([ff893b2](https://github.com/AztecProtocol/aztec-packages/commit/ff893b236091b480b6de18ebaab57c62dcdfe1d4)) + + +### Documentation + +* Add libstdc++-12-dev to setup instructions ([#3585](https://github.com/AztecProtocol/aztec-packages/issues/3585)) ([9773e8c](https://github.com/AztecProtocol/aztec-packages/commit/9773e8c3b4789f0dd6b5fdaf0f283b9bd7c9812f)) + +## [0.16.4](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.3...aztec-packages-v0.16.4) (2023-12-05) + + +### Bug Fixes + +* **ci:** Separate step for l1-contracts npm release ([#3581](https://github.com/AztecProtocol/aztec-packages/issues/3581)) ([7745975](https://github.com/AztecProtocol/aztec-packages/commit/7745975731a009c9010291b9174d321941754760)) + +## [0.16.3](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.2...aztec-packages-v0.16.3) (2023-12-05) + + +### Bug Fixes + +* Npm release of l1-contracts ([#3571](https://github.com/AztecProtocol/aztec-packages/issues/3571)) ([487419b](https://github.com/AztecProtocol/aztec-packages/commit/487419be549903a3d42b1232cce02139b2ac556f)) + + +### Miscellaneous + +* CLI's startup time was pushing almost 2s. This gets the basic 'help' down to 0.16. 
([#3529](https://github.com/AztecProtocol/aztec-packages/issues/3529)) ([396df13](https://github.com/AztecProtocol/aztec-packages/commit/396df13389cdcb8b8b0d5a92a4b3d1c2bffcb7a7)) + + +### Documentation + +* Documenting issue with `context.block_header` ([#3565](https://github.com/AztecProtocol/aztec-packages/issues/3565)) ([1237e26](https://github.com/AztecProtocol/aztec-packages/commit/1237e2658d90114c03a6b838cbab80005aa3a661)) + +## [0.16.2](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.1...aztec-packages-v0.16.2) (2023-12-05) + + +### Features + +* Add tree snapshots ([#3468](https://github.com/AztecProtocol/aztec-packages/issues/3468)) ([7a86bb3](https://github.com/AztecProtocol/aztec-packages/commit/7a86bb3a5e2bd9db60c1b70e11ced29deca83ff6)) +* **AVM:** First version for mini AVM (ADD, RETURN, CALLDATACOPY) ([#3439](https://github.com/AztecProtocol/aztec-packages/issues/3439)) ([b3af146](https://github.com/AztecProtocol/aztec-packages/commit/b3af1463ed6b7858252ab4779f8c747a6de47363)) +* Circuit optimized indexed tree batch insertion ([#3367](https://github.com/AztecProtocol/aztec-packages/issues/3367)) ([187d2f7](https://github.com/AztecProtocol/aztec-packages/commit/187d2f79d9390e43ec2e2ce6a0db0d6718cc1716)) +* Devnet ([#3473](https://github.com/AztecProtocol/aztec-packages/issues/3473)) ([97c40c2](https://github.com/AztecProtocol/aztec-packages/commit/97c40c26098dc615e95e8555458401afc88d9516)) +* **docs:** Add simple private voting tutorial ([#3402](https://github.com/AztecProtocol/aztec-packages/issues/3402)) ([a6e0352](https://github.com/AztecProtocol/aztec-packages/commit/a6e035275fc07f11d0354d0794eaa15d937ba278)) +* **docs:** Document slow update tree ([#3416](https://github.com/AztecProtocol/aztec-packages/issues/3416)) ([8e9f103](https://github.com/AztecProtocol/aztec-packages/commit/8e9f10349936ee414526915a93f4ec1070de17e4)) +* Flavor refactor, reduce duplication ([#3407](https://github.com/AztecProtocol/aztec-packages/issues/3407)) ([8d6b013](https://github.com/AztecProtocol/aztec-packages/commit/8d6b01304d797f7cbb576b23a7e115390d113c79)) +* Inclusion and non-inclusion proofs experiment ([#3255](https://github.com/AztecProtocol/aztec-packages/issues/3255)) ([b911e65](https://github.com/AztecProtocol/aztec-packages/commit/b911e6546bea5b3e2301b02459c5db8a1ff9024e)), closes [#2572](https://github.com/AztecProtocol/aztec-packages/issues/2572) [#2584](https://github.com/AztecProtocol/aztec-packages/issues/2584) +* New Poseidon2 circuit builder gates ([#3346](https://github.com/AztecProtocol/aztec-packages/issues/3346)) ([91cb369](https://github.com/AztecProtocol/aztec-packages/commit/91cb369aa7ecbf457965f53057cafa2c2e6f1214)) +* New Poseidon2 relations ([#3406](https://github.com/AztecProtocol/aztec-packages/issues/3406)) ([14b9736](https://github.com/AztecProtocol/aztec-packages/commit/14b9736925c6da33133bd24ee283fb4c199082a5)) +* Pull latest noir for brillig optimizations ([#3464](https://github.com/AztecProtocol/aztec-packages/issues/3464)) ([d356bac](https://github.com/AztecProtocol/aztec-packages/commit/d356bac740d203fbb9363a0127ca1d433358e029)) +* Refactor StandardIndexedTree for abstract leaves and preimages and optimized it ([#3530](https://github.com/AztecProtocol/aztec-packages/issues/3530)) ([63b9cdc](https://github.com/AztecProtocol/aztec-packages/commit/63b9cdc5823df540c73b3e53d8e3c4117deb3b02)) +* Removing historical roots from circuits ([#3544](https://github.com/AztecProtocol/aztec-packages/issues/3544)) 
([9f682cb](https://github.com/AztecProtocol/aztec-packages/commit/9f682cb8cf37eb392c4979f62fdec7126fb4d102)) +* Seperate pil files for sub machines ([#3454](https://github.com/AztecProtocol/aztec-packages/issues/3454)) ([d09d6f5](https://github.com/AztecProtocol/aztec-packages/commit/d09d6f5a5f2c7e2a58658a640a6a6d6ba4294701)) +* Throw compile time error if contract has too many fns ([#3536](https://github.com/AztecProtocol/aztec-packages/issues/3536)) ([ad66ad0](https://github.com/AztecProtocol/aztec-packages/commit/ad66ad0811181def6ef13c646acfc06261958787)) +* Use tree snapshots in aztec-node/pxe/oracles ([#3504](https://github.com/AztecProtocol/aztec-packages/issues/3504)) ([6e40427](https://github.com/AztecProtocol/aztec-packages/commit/6e4042757feb852dca77c957fc52f41e5b30f848)) +* Yellow paper cross-chain communication ([#3477](https://github.com/AztecProtocol/aztec-packages/issues/3477)) ([d51df8c](https://github.com/AztecProtocol/aztec-packages/commit/d51df8cf6d756e03ffa577b9e35b92a9b723e6c1)) + + +### Bug Fixes + +* Check version, chainid and sender for cross-chain l1 to l2 msgs ([#3457](https://github.com/AztecProtocol/aztec-packages/issues/3457)) ([d251703](https://github.com/AztecProtocol/aztec-packages/commit/d251703213c42c427ed3e0f8ff1098edf3b6a2e3)) +* **ci:** Add DEPLOY_TAG in fork log group ([#3510](https://github.com/AztecProtocol/aztec-packages/issues/3510)) ([f021041](https://github.com/AztecProtocol/aztec-packages/commit/f02104136f2d98325baa21792ea10245abffab76)) +* **ci:** Check if l1 contracts img has been deployed ([#3531](https://github.com/AztecProtocol/aztec-packages/issues/3531)) ([ac1f03c](https://github.com/AztecProtocol/aztec-packages/commit/ac1f03c995457df161ce59b181664950871b6436)) +* **ci:** Comment out LB listeners (for now) ([#3519](https://github.com/AztecProtocol/aztec-packages/issues/3519)) ([640aabc](https://github.com/AztecProtocol/aztec-packages/commit/640aabc414876a3dacb5287e2705380a9fafca9f)) +* **ci:** Count for bootnode discovery service ([#3517](https://github.com/AztecProtocol/aztec-packages/issues/3517)) ([2a38788](https://github.com/AztecProtocol/aztec-packages/commit/2a38788ee7857162a9af391323f53187e670dedc)) +* **ci:** Define REPOSITORY in deploy_l1_contracts ([#3514](https://github.com/AztecProtocol/aztec-packages/issues/3514)) ([b246d1b](https://github.com/AztecProtocol/aztec-packages/commit/b246d1ba3a899af5e7566944a9d79be62827cdd5)) +* **ci:** Don't deploy to npm on master merge ([#3502](https://github.com/AztecProtocol/aztec-packages/issues/3502)) ([a138860](https://github.com/AztecProtocol/aztec-packages/commit/a138860bf4032be9688c5ffb5d95b12bcb6d459e)) +* **ci:** Env vars for deploying l1-contracts ([#3513](https://github.com/AztecProtocol/aztec-packages/issues/3513)) ([27106b2](https://github.com/AztecProtocol/aztec-packages/commit/27106b2e2845cb32ea229a8527b86a691a668f20)) +* **ci:** Export FORK_API_KEY from setup_env ([#3512](https://github.com/AztecProtocol/aztec-packages/issues/3512)) ([7e81e2c](https://github.com/AztecProtocol/aztec-packages/commit/7e81e2c53deaf2b5efcc6b0567fc1240540471eb)) +* **ci:** Fix docker architecture for devnet packages ([#3505](https://github.com/AztecProtocol/aztec-packages/issues/3505)) ([66d0287](https://github.com/AztecProtocol/aztec-packages/commit/66d02879a33ded27e188b90b1d7ac6b551830acc)) +* **ci:** Fix faucet vars + don't deploy contracts from node ([#3553](https://github.com/AztecProtocol/aztec-packages/issues/3553)) 
([c7176f6](https://github.com/AztecProtocol/aztec-packages/commit/c7176f6c04486a3f261a48958ccadba684f33521)) +* **ci:** L1 contracts directories ([#3545](https://github.com/AztecProtocol/aztec-packages/issues/3545)) ([63dd0c8](https://github.com/AztecProtocol/aztec-packages/commit/63dd0c8852ca7605a2407458b355b3776a96b37c)) +* **ci:** Login to ecr to fetch contracts image ([#3538](https://github.com/AztecProtocol/aztec-packages/issues/3538)) ([b033538](https://github.com/AztecProtocol/aztec-packages/commit/b0335383c884d81562c2911ecae9d889f1076254)) +* **ci:** Remove unused ADDRESS vars & export private key vars ([#3520](https://github.com/AztecProtocol/aztec-packages/issues/3520)) ([d889359](https://github.com/AztecProtocol/aztec-packages/commit/d8893590a8f6f7b1d0a60279a6a2bc9fd0b5c154)) +* **ci:** Set default value for $TO_TAINT ([#3508](https://github.com/AztecProtocol/aztec-packages/issues/3508)) ([8b6688a](https://github.com/AztecProtocol/aztec-packages/commit/8b6688a7975a748f910f67ee17dbc61fd1df7001)) +* **ci:** Terraform listener resources ([#3534](https://github.com/AztecProtocol/aztec-packages/issues/3534)) ([c3b9cce](https://github.com/AztecProtocol/aztec-packages/commit/c3b9cce96599451fce79fd3318176da4708bfa6a)) +* **ci:** Terraform_deploy for devnet ([#3516](https://github.com/AztecProtocol/aztec-packages/issues/3516)) ([ba3803e](https://github.com/AztecProtocol/aztec-packages/commit/ba3803ec7c208804f8da5ee81b9989f4640a2fc1)) +* **ci:** Tf variable references & formatting([#3522](https://github.com/AztecProtocol/aztec-packages/issues/3522)) ([d37cf52](https://github.com/AztecProtocol/aztec-packages/commit/d37cf520348e17acdc9de93bc2cf83560ccf57d5)) +* Disable e2e-slow-tree ([#3459](https://github.com/AztecProtocol/aztec-packages/issues/3459)) ([5927103](https://github.com/AztecProtocol/aztec-packages/commit/59271039b3a087a4f33b11701929cebf2eadb61d)) +* **docs:** Update package name of aztec-cli ([#3474](https://github.com/AztecProtocol/aztec-packages/issues/3474)) ([98d7ba0](https://github.com/AztecProtocol/aztec-packages/commit/98d7ba0c1d8c809f1bcb05e517412f99e46f95ae)) +* Double slash in deployed faucet routes ([#3555](https://github.com/AztecProtocol/aztec-packages/issues/3555)) ([6c704a5](https://github.com/AztecProtocol/aztec-packages/commit/6c704a5502746e8a002e039ce8c73e8e207ca9d0)) +* Faucet lb_listener priority ([#3554](https://github.com/AztecProtocol/aztec-packages/issues/3554)) ([3f56dd7](https://github.com/AztecProtocol/aztec-packages/commit/3f56dd7cacfda0eb7a4bf0c38ec804a85e6881d2)) +* Handling low_nullifier.next_value equal to 0 ([#3562](https://github.com/AztecProtocol/aztec-packages/issues/3562)) ([c800502](https://github.com/AztecProtocol/aztec-packages/commit/c8005023d80a2a4e15d3a3bea10072371e3c5842)), closes [#3550](https://github.com/AztecProtocol/aztec-packages/issues/3550) +* Remove x86_64 form l1-contracts img tag ([#3549](https://github.com/AztecProtocol/aztec-packages/issues/3549)) ([6828f1a](https://github.com/AztecProtocol/aztec-packages/commit/6828f1ac33755ca6ccf42096d741d5ea326dae66)) +* Throw error if fn sig has whitespaces ([#3509](https://github.com/AztecProtocol/aztec-packages/issues/3509)) ([7671063](https://github.com/AztecProtocol/aztec-packages/commit/7671063a2cb32c45a751c33f6ed5e1b8bea8608f)), closes [#3055](https://github.com/AztecProtocol/aztec-packages/issues/3055) + + +### Miscellaneous + +* (yellow paper) public-vm section of yellow paper ([#3493](https://github.com/AztecProtocol/aztec-packages/issues/3493)) 
([8ff3780](https://github.com/AztecProtocol/aztec-packages/commit/8ff378005f78126260cb0950a8167ec40efd14aa)) +* Add mermaid diagram support ([#3499](https://github.com/AztecProtocol/aztec-packages/issues/3499)) ([537d552](https://github.com/AztecProtocol/aztec-packages/commit/537d552009676a7dfed2d75e7f73a572591699af)) +* Add yellow paper build check to CI ([#3490](https://github.com/AztecProtocol/aztec-packages/issues/3490)) ([3ebd2f2](https://github.com/AztecProtocol/aztec-packages/commit/3ebd2f25646c7db170d22c62f41888d0c417d644)) +* **avm:** Enable AVM unit tests in CI ([#3463](https://github.com/AztecProtocol/aztec-packages/issues/3463)) ([051dda9](https://github.com/AztecProtocol/aztec-packages/commit/051dda9c50f1d9f11f5063ddf51c9986a6998b43)), closes [#3461](https://github.com/AztecProtocol/aztec-packages/issues/3461) +* **bb:** Pointer_view to reference-based get_all ([#3495](https://github.com/AztecProtocol/aztec-packages/issues/3495)) ([50d7327](https://github.com/AztecProtocol/aztec-packages/commit/50d73271919306a05ac3a7c2e7d37363b6761248)) +* **bb:** Reuse entities from GoblinUltra in GoblinUltraRecursive ([#3521](https://github.com/AztecProtocol/aztec-packages/issues/3521)) ([8259636](https://github.com/AztecProtocol/aztec-packages/commit/8259636c016c0adecb052f176e78444fb5481c38)) +* Build the acir test vectors as part of CI. ([#3447](https://github.com/AztecProtocol/aztec-packages/issues/3447)) ([1a2d1f8](https://github.com/AztecProtocol/aztec-packages/commit/1a2d1f822d0e1fabd322c2c4d0473629edd56380)) +* Containers reduced to ~100MB total. ~30s installation. ([#3487](https://github.com/AztecProtocol/aztec-packages/issues/3487)) ([b49cef2](https://github.com/AztecProtocol/aztec-packages/commit/b49cef21e30f06bce23f421b533e64728278cbf8)) +* **docs:** Fix broken Noir stdlib link ([#3496](https://github.com/AztecProtocol/aztec-packages/issues/3496)) ([787d59a](https://github.com/AztecProtocol/aztec-packages/commit/787d59a1a583788773a0e5d75a9079328ce2a21d)) +* Field-agnostic and reusable transcript ([#3433](https://github.com/AztecProtocol/aztec-packages/issues/3433)) ([d78775a](https://github.com/AztecProtocol/aztec-packages/commit/d78775adb9574a3d76c3fca8cf940cdef460ae10)) +* Fix broken link in txs in yellow paper ([#3484](https://github.com/AztecProtocol/aztec-packages/issues/3484)) ([798565d](https://github.com/AztecProtocol/aztec-packages/commit/798565d5a8a5cb096c9b2efb6d41de1c449d2c4e)) +* Fix yellow paper build error ([32881a4](https://github.com/AztecProtocol/aztec-packages/commit/32881a4d0912e0287b558a4785b6d60c50f84335)) +* Fixed typo in build system ([#3501](https://github.com/AztecProtocol/aztec-packages/issues/3501)) ([3a80ac2](https://github.com/AztecProtocol/aztec-packages/commit/3a80ac2caf5f1f847f5e6b2a7b526b81a211de29)) +* Increase functions per contract from 16 to 32 ([#3503](https://github.com/AztecProtocol/aztec-packages/issues/3503)) ([ebdeea3](https://github.com/AztecProtocol/aztec-packages/commit/ebdeea3f4bc721d5708b44ba1f89ba24eb0e25d5)) +* Naming fixes ([#3476](https://github.com/AztecProtocol/aztec-packages/issues/3476)) ([1db30bf](https://github.com/AztecProtocol/aztec-packages/commit/1db30bf0d61a7b2920ab1aedaef58bc0922ec78e)) +* Optimise bb.js package size and sandox/cli dockerfiles to unbloat final containers. 
([#3462](https://github.com/AztecProtocol/aztec-packages/issues/3462)) ([cb3db5d](https://github.com/AztecProtocol/aztec-packages/commit/cb3db5d0f1f8912f1a97258e5043eb0f69eff551)) +* Pin node version in docker base images and bump nvmrc ([#3537](https://github.com/AztecProtocol/aztec-packages/issues/3537)) ([5d3895a](https://github.com/AztecProtocol/aztec-packages/commit/5d3895aefb7812eb6bd8017baf43533959ad69b4)) +* Recursive verifier updates ([#3452](https://github.com/AztecProtocol/aztec-packages/issues/3452)) ([dbb4a12](https://github.com/AztecProtocol/aztec-packages/commit/dbb4a1205528bdd8217ea2d15ccf060e2aa9b7d2)) +* Refactor `WitnessEntities` to be able to derive `WitnessCommitments` from it ([#3479](https://github.com/AztecProtocol/aztec-packages/issues/3479)) ([9c9b561](https://github.com/AztecProtocol/aztec-packages/commit/9c9b561f392de5fce11cefe4d72e4f33f2567f41)) +* Remove temporary logging ([#3466](https://github.com/AztecProtocol/aztec-packages/issues/3466)) ([8c8387b](https://github.com/AztecProtocol/aztec-packages/commit/8c8387b6b18335ca23f62c3d4c942415b7449462)) +* Transcript handled through shared_ptr ([#3434](https://github.com/AztecProtocol/aztec-packages/issues/3434)) ([30fca33](https://github.com/AztecProtocol/aztec-packages/commit/30fca3307ee7e33d81fd51c3d280c6362baef0b9)) +* Typo fixes ([#3488](https://github.com/AztecProtocol/aztec-packages/issues/3488)) ([d9a44dc](https://github.com/AztecProtocol/aztec-packages/commit/d9a44dc2e655752e1c6503ac85b64169ec7e4754)) +* **yellow_paper:** Public<>private messaging ([#3491](https://github.com/AztecProtocol/aztec-packages/issues/3491)) ([6ecc406](https://github.com/AztecProtocol/aztec-packages/commit/6ecc406159a022e5d57267dcaea48e0df25bbda0)) + + +### Documentation + +* Add transaction section to yellow paper ([#3418](https://github.com/AztecProtocol/aztec-packages/issues/3418)) ([44bf30b](https://github.com/AztecProtocol/aztec-packages/commit/44bf30b0af5a546e375d068790e9fa7e94d6ca52)) +* Apply comments from Jan on contracts ([#3539](https://github.com/AztecProtocol/aztec-packages/issues/3539)) ([e351873](https://github.com/AztecProtocol/aztec-packages/commit/e351873cadb5cbca5d1d61016e6f9a9e7479bff9)) +* Fees update in yellow paper ([#3486](https://github.com/AztecProtocol/aztec-packages/issues/3486)) ([a8b2608](https://github.com/AztecProtocol/aztec-packages/commit/a8b26086306bfec6e7808f4858a08644e84336f4)) +* First go at generated AVM instruction set doc ([#3469](https://github.com/AztecProtocol/aztec-packages/issues/3469)) ([8cc54a4](https://github.com/AztecProtocol/aztec-packages/commit/8cc54a48917ff319a5c2b706e01cfbf5ebca013e)) +* Further update to the yellow paper ([#3542](https://github.com/AztecProtocol/aztec-packages/issues/3542)) ([751bb6a](https://github.com/AztecProtocol/aztec-packages/commit/751bb6a2075705931b3035117512a93769142707)) +* Yellow paper updates ([#3478](https://github.com/AztecProtocol/aztec-packages/issues/3478)) ([11f754d](https://github.com/AztecProtocol/aztec-packages/commit/11f754d256cc164ca2d50b9923aeba1612e7f48b)) +* Yellow paper updates for private message delivery ([#3472](https://github.com/AztecProtocol/aztec-packages/issues/3472)) ([6ba9e18](https://github.com/AztecProtocol/aztec-packages/commit/6ba9e18820c85acca692d2af03e4d800c29ab6dc)) +* **yellow-paper:** Sync, enqueued, and static calls ([#3494](https://github.com/AztecProtocol/aztec-packages/issues/3494)) ([00835c6](https://github.com/AztecProtocol/aztec-packages/commit/00835c67b460074fe16e19b27a47ac37273e743b)), closes 
[#3108](https://github.com/AztecProtocol/aztec-packages/issues/3108) +* **yellowpaper:** Instruction set updates and fixes ([#3515](https://github.com/AztecProtocol/aztec-packages/issues/3515)) ([bfb61dd](https://github.com/AztecProtocol/aztec-packages/commit/bfb61dd1412e856adc912f0e3133cd6f8c9e8fbf)) + ## [0.16.1](https://github.com/AztecProtocol/aztec-packages/compare/aztec-packages-v0.16.0...aztec-packages-v0.16.1) (2023-11-28) diff --git a/barretenberg/.gitrepo b/barretenberg/.gitrepo index e509f3d505f..4ba3cac6d74 100644 --- a/barretenberg/.gitrepo +++ b/barretenberg/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/barretenberg branch = master - commit = fe670eda0228568c1a974bd54954b58750cfa462 - parent = ebdeea3f4bc721d5708b44ba1f89ba24eb0e25d5 + commit = 4e4634542814f15d3703fe560874e08eda57e1fa + parent = 379b5adc259ac69b01e61b852172cdfc87cf9350 method = merge cmdver = 0.4.6 diff --git a/barretenberg/CHANGELOG.md b/barretenberg/CHANGELOG.md index 961b93e492c..65412da7cf3 100644 --- a/barretenberg/CHANGELOG.md +++ b/barretenberg/CHANGELOG.md @@ -1,5 +1,72 @@ # Changelog +## [0.16.7](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.6...barretenberg-v0.16.7) (2023-12-06) + + +### Features + +* Encapsulated Goblin ([#3524](https://github.com/AztecProtocol/aztec-packages/issues/3524)) ([2f08423](https://github.com/AztecProtocol/aztec-packages/commit/2f08423e37942f991634fe6c45de52feb1f159cf)) + +## [0.16.6](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.5...barretenberg-v0.16.6) (2023-12-06) + + +### Miscellaneous + +* **barretenberg:** Synchronize aztec-packages versions + +## [0.16.5](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.4...barretenberg-v0.16.5) (2023-12-06) + + +### Miscellaneous + +* Trivial change roundup ([#3556](https://github.com/AztecProtocol/aztec-packages/issues/3556)) ([ff893b2](https://github.com/AztecProtocol/aztec-packages/commit/ff893b236091b480b6de18ebaab57c62dcdfe1d4)) + + +### Documentation + +* Add libstdc++-12-dev to setup instructions ([#3585](https://github.com/AztecProtocol/aztec-packages/issues/3585)) ([9773e8c](https://github.com/AztecProtocol/aztec-packages/commit/9773e8c3b4789f0dd6b5fdaf0f283b9bd7c9812f)) + +## [0.16.4](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.3...barretenberg-v0.16.4) (2023-12-05) + + +### Miscellaneous + +* **barretenberg:** Synchronize aztec-packages versions + +## [0.16.3](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.2...barretenberg-v0.16.3) (2023-12-05) + + +### Miscellaneous + +* CLI's startup time was pushing almost 2s. This gets the basic 'help' down to 0.16. 
([#3529](https://github.com/AztecProtocol/aztec-packages/issues/3529)) ([396df13](https://github.com/AztecProtocol/aztec-packages/commit/396df13389cdcb8b8b0d5a92a4b3d1c2bffcb7a7)) + +## [0.16.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.1...barretenberg-v0.16.2) (2023-12-05) + + +### Features + +* **AVM:** First version for mini AVM (ADD, RETURN, CALLDATACOPY) ([#3439](https://github.com/AztecProtocol/aztec-packages/issues/3439)) ([b3af146](https://github.com/AztecProtocol/aztec-packages/commit/b3af1463ed6b7858252ab4779f8c747a6de47363)) +* Flavor refactor, reduce duplication ([#3407](https://github.com/AztecProtocol/aztec-packages/issues/3407)) ([8d6b013](https://github.com/AztecProtocol/aztec-packages/commit/8d6b01304d797f7cbb576b23a7e115390d113c79)) +* New Poseidon2 circuit builder gates ([#3346](https://github.com/AztecProtocol/aztec-packages/issues/3346)) ([91cb369](https://github.com/AztecProtocol/aztec-packages/commit/91cb369aa7ecbf457965f53057cafa2c2e6f1214)) +* New Poseidon2 relations ([#3406](https://github.com/AztecProtocol/aztec-packages/issues/3406)) ([14b9736](https://github.com/AztecProtocol/aztec-packages/commit/14b9736925c6da33133bd24ee283fb4c199082a5)) +* Pull latest noir for brillig optimizations ([#3464](https://github.com/AztecProtocol/aztec-packages/issues/3464)) ([d356bac](https://github.com/AztecProtocol/aztec-packages/commit/d356bac740d203fbb9363a0127ca1d433358e029)) +* Seperate pil files for sub machines ([#3454](https://github.com/AztecProtocol/aztec-packages/issues/3454)) ([d09d6f5](https://github.com/AztecProtocol/aztec-packages/commit/d09d6f5a5f2c7e2a58658a640a6a6d6ba4294701)) + + +### Miscellaneous + +* **avm:** Enable AVM unit tests in CI ([#3463](https://github.com/AztecProtocol/aztec-packages/issues/3463)) ([051dda9](https://github.com/AztecProtocol/aztec-packages/commit/051dda9c50f1d9f11f5063ddf51c9986a6998b43)), closes [#3461](https://github.com/AztecProtocol/aztec-packages/issues/3461) +* **bb:** Pointer_view to reference-based get_all ([#3495](https://github.com/AztecProtocol/aztec-packages/issues/3495)) ([50d7327](https://github.com/AztecProtocol/aztec-packages/commit/50d73271919306a05ac3a7c2e7d37363b6761248)) +* **bb:** Reuse entities from GoblinUltra in GoblinUltraRecursive ([#3521](https://github.com/AztecProtocol/aztec-packages/issues/3521)) ([8259636](https://github.com/AztecProtocol/aztec-packages/commit/8259636c016c0adecb052f176e78444fb5481c38)) +* Build the acir test vectors as part of CI. ([#3447](https://github.com/AztecProtocol/aztec-packages/issues/3447)) ([1a2d1f8](https://github.com/AztecProtocol/aztec-packages/commit/1a2d1f822d0e1fabd322c2c4d0473629edd56380)) +* Field-agnostic and reusable transcript ([#3433](https://github.com/AztecProtocol/aztec-packages/issues/3433)) ([d78775a](https://github.com/AztecProtocol/aztec-packages/commit/d78775adb9574a3d76c3fca8cf940cdef460ae10)) +* Optimise bb.js package size and sandox/cli dockerfiles to unbloat final containers. 
([#3462](https://github.com/AztecProtocol/aztec-packages/issues/3462)) ([cb3db5d](https://github.com/AztecProtocol/aztec-packages/commit/cb3db5d0f1f8912f1a97258e5043eb0f69eff551)) +* Pin node version in docker base images and bump nvmrc ([#3537](https://github.com/AztecProtocol/aztec-packages/issues/3537)) ([5d3895a](https://github.com/AztecProtocol/aztec-packages/commit/5d3895aefb7812eb6bd8017baf43533959ad69b4)) +* Recursive verifier updates ([#3452](https://github.com/AztecProtocol/aztec-packages/issues/3452)) ([dbb4a12](https://github.com/AztecProtocol/aztec-packages/commit/dbb4a1205528bdd8217ea2d15ccf060e2aa9b7d2)) +* Refactor `WitnessEntities` to be able to derive `WitnessCommitments` from it ([#3479](https://github.com/AztecProtocol/aztec-packages/issues/3479)) ([9c9b561](https://github.com/AztecProtocol/aztec-packages/commit/9c9b561f392de5fce11cefe4d72e4f33f2567f41)) +* Transcript handled through shared_ptr ([#3434](https://github.com/AztecProtocol/aztec-packages/issues/3434)) ([30fca33](https://github.com/AztecProtocol/aztec-packages/commit/30fca3307ee7e33d81fd51c3d280c6362baef0b9)) +* Typo fixes ([#3488](https://github.com/AztecProtocol/aztec-packages/issues/3488)) ([d9a44dc](https://github.com/AztecProtocol/aztec-packages/commit/d9a44dc2e655752e1c6503ac85b64169ec7e4754)) + ## [0.16.1](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg-v0.16.0...barretenberg-v0.16.1) (2023-11-28) diff --git a/barretenberg/README.md b/barretenberg/README.md index fd24c4fa039..b87cf333b54 100644 --- a/barretenberg/README.md +++ b/barretenberg/README.md @@ -46,12 +46,13 @@ Ignores proving key construction. - Ninja (used by the presets as the default generator) - clang >= 16 or gcc >= 10 - clang-format +- libstdc++ >= 12 - libomp (if multithreading is required. 
Multithreading can be disabled using the compiler flag `-DMULTITHREADING 0`) To install on Ubuntu, run: ``` -sudo apt-get install cmake clang clang-format ninja-build +sudo apt-get install cmake clang clang-format ninja-build libstdc++-12-dev ``` ### Installing openMP (Linux) diff --git a/barretenberg/acir_tests/Dockerfile.bb b/barretenberg/acir_tests/Dockerfile.bb index 000627d8172..7e211d62eca 100644 --- a/barretenberg/acir_tests/Dockerfile.bb +++ b/barretenberg/acir_tests/Dockerfile.bb @@ -1,7 +1,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-assert FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add git bash curl jq coreutils COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build COPY --from=noir-acir-tests /usr/src/noir/test_programs /usr/src/noir/test_programs diff --git a/barretenberg/acir_tests/Dockerfile.bb.js b/barretenberg/acir_tests/Dockerfile.bb.js index 1b86d6e953d..760b231fc90 100644 --- a/barretenberg/acir_tests/Dockerfile.bb.js +++ b/barretenberg/acir_tests/Dockerfile.bb.js @@ -1,8 +1,8 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/bb.js FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests -FROM node:18 -COPY --from=0 /usr/src/barretenberg/ts /usr/src/barretenberg/ts +FROM node:18.19.0 +COPY --from=0 /usr/src/barretenberg/ts-build /usr/src/barretenberg/ts COPY --from=noir-acir-tests /usr/src/noir/test_programs /usr/src/noir/test_programs RUN apt update && apt install -y lsof jq WORKDIR /usr/src/barretenberg/acir_tests diff --git a/barretenberg/acir_tests/Dockerfile.bb.sol b/barretenberg/acir_tests/Dockerfile.bb.sol index 7840d0c2e4f..40ffa075078 100644 --- a/barretenberg/acir_tests/Dockerfile.bb.sol +++ b/barretenberg/acir_tests/Dockerfile.bb.sol @@ -2,7 +2,7 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clan FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-x86_64-linux-clang-sol FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-acir-tests as noir-acir-tests -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add git bash curl jq COPY --from=0 /usr/src/barretenberg/cpp/build /usr/src/barretenberg/cpp/build COPY --from=1 /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol /usr/src/barretenberg/sol/src/ultra/BaseUltraVerifier.sol diff --git a/barretenberg/cpp/CMakeLists.txt b/barretenberg/cpp/CMakeLists.txt index ed824a57ba7..07d72011c21 100644 --- a/barretenberg/cpp/CMakeLists.txt +++ b/barretenberg/cpp/CMakeLists.txt @@ -6,7 +6,7 @@ cmake_minimum_required(VERSION 3.24) project( Barretenberg DESCRIPTION "BN254 elliptic curve library, and PLONK SNARK prover" - VERSION 0.16.1 # x-release-please-version + VERSION 0.16.7 # x-release-please-version LANGUAGES CXX C ) # Insert version into `bb` config file diff --git a/barretenberg/cpp/pil/fib/fibonacci.pil b/barretenberg/cpp/pil/fib/fibonacci.pil deleted file mode 100644 index 993a675e7d2..00000000000 --- a/barretenberg/cpp/pil/fib/fibonacci.pil +++ /dev/null @@ -1,18 +0,0 @@ -constant %N = 16; - -// This uses the alternative nomenclature as well. 
- -namespace Fibonacci(%N); - col fixed LAST(i) { match i { - %N - 1 => 1, - _ => 0, - } }; - col fixed FIRST(i) { match i { - 0 => 1, - _ => 0, - } }; - col witness x, y; - - (1-FIRST) * (1-LAST) * (x' - y) = 0; - (1-FIRST) * (1-LAST) * (y' - (x + y)) = 0; - diff --git a/barretenberg/cpp/pil/fib/fibonacci_opt.pil b/barretenberg/cpp/pil/fib/fibonacci_opt.pil deleted file mode 100644 index 2c36cd15327..00000000000 --- a/barretenberg/cpp/pil/fib/fibonacci_opt.pil +++ /dev/null @@ -1,8 +0,0 @@ -constant %N = 16; -namespace Fibonacci(16); - col fixed LAST(i) { match i { (%N - 1) => 1, _ => 0, } }; - col fixed FIRST(i) { match i { 0 => 1, _ => 0, } }; - col witness x; - col witness y; - (((1 - Fibonacci.FIRST) * (1 - Fibonacci.LAST)) * (Fibonacci.x' - Fibonacci.y)) = 0; - (((1 - Fibonacci.FIRST) * (1 - Fibonacci.LAST)) * (Fibonacci.y' - (Fibonacci.x + Fibonacci.y))) = 0; diff --git a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp index 6f223939a4b..0ef9fabfa0a 100644 --- a/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp +++ b/barretenberg/cpp/src/barretenberg/benchmark/honk_bench/benchmark_utilities.hpp @@ -218,7 +218,7 @@ void construct_proof_with_specified_num_iterations( Composer composer; for (auto _ : state) { - // Constuct circuit and prover; don't include this part in measurement + // Construct circuit and prover; don't include this part in measurement state.PauseTiming(); auto prover = get_prover(composer, test_circuit_function, num_iterations); state.ResumeTiming(); diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp index 16d3f3a9cfb..60b5804a47d 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.cpp @@ -2,7 +2,7 @@ #include "barretenberg/commitment_schemes/claim.hpp" #include "barretenberg/commitment_schemes/commitment_key.hpp" #include "barretenberg/common/ref_array.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/honk/proof_system/power_polynomial.hpp" #include "barretenberg/polynomials/polynomial.hpp" @@ -183,7 +183,7 @@ template void ECCVMProver_::execute_log_derivative_ gamma * (gamma + beta_sqr) * (gamma + beta_sqr + beta_sqr) * (gamma + beta_sqr + beta_sqr + beta_sqr); relation_parameters.eccvm_set_permutation_delta = relation_parameters.eccvm_set_permutation_delta.invert(); // Compute inverse polynomial for our logarithmic-derivative lookup method - lookup_library::compute_logderivative_inverse( + logderivative_library::compute_logderivative_inverse( prover_polynomials, relation_parameters, key->circuit_size); transcript->send_to_verifier(commitment_labels.lookup_inverses, commitment_key->commit(key->lookup_inverses)); prover_polynomials.lookup_inverses = key->lookup_inverses; @@ -345,31 +345,34 @@ template void ECCVMProver_::execute_transcript_cons transcript->send_to_verifier("Translation:hack_evaluation", hack.evaluate(evaluation_challenge_x)); // Get another challenge for batching the univariate claims - FF batching_challenge = transcript->get_challenge("Translation:batching_challenge"); + FF ipa_batching_challenge = transcript->get_challenge("Translation:ipa_batching_challenge"); // Collect the 
polynomials and evaluations to be batched RefArray univariate_polynomials{ key->transcript_op, key->transcript_Px, key->transcript_Py, key->transcript_z1, key->transcript_z2, hack }; std::array univariate_evaluations; - // Constuct the batched polynomial and batched evaluation + // Construct the batched polynomial and batched evaluation Polynomial batched_univariate{ key->circuit_size }; FF batched_evaluation{ 0 }; auto batching_scalar = FF(1); for (auto [polynomial, eval] : zip_view(univariate_polynomials, univariate_evaluations)) { batched_univariate.add_scaled(polynomial, batching_scalar); batched_evaluation += eval * batching_scalar; - batching_scalar *= batching_challenge; + batching_scalar *= ipa_batching_challenge; } // Compute a proof for the batched univariate opening PCS::compute_opening_proof( commitment_key, { evaluation_challenge_x, batched_evaluation }, batched_univariate, transcript); + + // Get another challenge for batching the univariate claims + translation_batching_challenge_v = transcript->get_challenge("Translation:batching_challenge"); } template plonk::proof& ECCVMProver_::export_proof() { - proof.proof_data = transcript->proof_data; + proof.proof_data = transcript->export_proof(); return proof; } diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp index 39b42b89688..af6f4ec457f 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_prover.hpp @@ -69,6 +69,7 @@ template class ECCVMProver_ { Polynomial quotient_W; FF evaluation_challenge_x; + FF translation_batching_challenge_v; // to be rederived by the translator verifier sumcheck::SumcheckOutput sumcheck_output; pcs::gemini::ProverOutput gemini_output; diff --git a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp index d7f26ce7299..76058a9c800 100644 --- a/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/eccvm/eccvm_verifier.cpp @@ -253,16 +253,17 @@ template bool ECCVMVerifier_::verify_proof(const plonk transcript->template receive_from_prover("Translation:hack_evaluation") }; - FF batching_challenge = transcript->get_challenge("Translation:batching_challenge"); + // Get another challenge for batching the univariate claims + FF ipa_batching_challenge = transcript->get_challenge("Translation:ipa_batching_challenge"); - // Constuct batched commitment and batched evaluation + // Construct batched commitment and batched evaluation auto batched_commitment = transcript_commitments[0]; auto batched_transcript_eval = transcript_evaluations[0]; - auto batching_scalar = batching_challenge; + auto batching_scalar = ipa_batching_challenge; for (size_t idx = 1; idx < transcript_commitments.size(); ++idx) { batched_commitment = batched_commitment + transcript_commitments[idx] * batching_scalar; batched_transcript_eval += batching_scalar * transcript_evaluations[idx]; - batching_scalar *= batching_challenge; + batching_scalar *= ipa_batching_challenge; } // Construct and verify batched opening claim diff --git a/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt index 19dc8bec7b1..f2c41054b03 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/flavor/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(flavor commitment_schemes ecc polynomials 
proof_system) \ No newline at end of file +barretenberg_module(flavor commitment_schemes ecc polynomials proof_system) diff --git a/barretenberg/cpp/src/barretenberg/flavor/generated/Fib_flavor.hpp b/barretenberg/cpp/src/barretenberg/flavor/generated/Fib_flavor.hpp deleted file mode 100644 index 9572fedd3ff..00000000000 --- a/barretenberg/cpp/src/barretenberg/flavor/generated/Fib_flavor.hpp +++ /dev/null @@ -1,296 +0,0 @@ - - -#pragma once -#include "../relation_definitions_fwd.hpp" -#include "barretenberg/commitment_schemes/kzg/kzg.hpp" -#include "barretenberg/ecc/curves/bn254/g1.hpp" -#include "barretenberg/polynomials/barycentric.hpp" -#include "barretenberg/polynomials/univariate.hpp" - -#include "barretenberg/flavor/flavor.hpp" -#include "barretenberg/flavor/flavor_macros.hpp" -#include "barretenberg/polynomials/evaluation_domain.hpp" -#include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/relations/generated/Fib.hpp" -#include "barretenberg/transcript/transcript.hpp" - -namespace proof_system::honk { -namespace flavor { - -class FibFlavor { - public: - using Curve = curve::BN254; - using G1 = Curve::Group; - using PCS = pcs::kzg::KZG; - - using FF = G1::subgroup_field; - using Polynomial = barretenberg::Polynomial; - using PolynomialHandle = std::span; - using GroupElement = G1::element; - using Commitment = G1::affine_element; - using CommitmentHandle = G1::affine_element; - using CommitmentKey = pcs::CommitmentKey; - using VerifierCommitmentKey = pcs::VerifierCommitmentKey; - - static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 2; - static constexpr size_t NUM_WITNESS_ENTITIES = 2; - static constexpr size_t NUM_WIRES = NUM_WITNESS_ENTITIES + NUM_PRECOMPUTED_ENTITIES; - // We have two copies of the witness entities, so we subtract the number of fixed ones (they have no shift), one for - // the unshifted and one for the shifted - static constexpr size_t NUM_ALL_ENTITIES = 6; - - using Relations = std::tuple>; - - static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); - - // BATCHED_RELATION_PARTIAL_LENGTH = algebraic degree of sumcheck relation *after* multiplying by the `pow_zeta` - // random polynomial e.g. 
For \sum(x) [A(x) * B(x) + C(x)] * PowZeta(X), relation length = 2 and random relation - // length = 3 - static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1; - static constexpr size_t NUM_RELATIONS = std::tuple_size::value; - - template - using ProtogalaxyTupleOfTuplesOfUnivariates = - decltype(create_protogalaxy_tuple_of_tuples_of_univariates()); - using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); - using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); - - static constexpr bool has_zero_row = true; - - private: - template class PrecomputedEntities : public PrecomputedEntitiesBase { - public: - using DataType = DataType_; - DEFINE_FLAVOR_MEMBERS(DataType, Fibonacci_LAST, Fibonacci_FIRST) - - RefVector get_selectors() - { - return { - Fibonacci_LAST, - Fibonacci_FIRST, - }; - }; - - RefVector get_sigma_polynomials() { return {}; }; - RefVector get_id_polynomials() { return {}; }; - RefVector get_table_polynomials() { return {}; }; - }; - - template class WitnessEntities { - public: - DEFINE_FLAVOR_MEMBERS(DataType, Fibonacci_x, Fibonacci_y) - - RefVector get_wires() - { - return { - Fibonacci_x, - Fibonacci_y, - - }; - }; - - RefVector get_sorted_polynomials() { return {}; }; - }; - - template class AllEntities { - public: - DEFINE_FLAVOR_MEMBERS( - DataType, Fibonacci_LAST, Fibonacci_FIRST, Fibonacci_x, Fibonacci_y, Fibonacci_x_shift, Fibonacci_y_shift) - - RefVector get_wires() - { - return { - Fibonacci_LAST, Fibonacci_FIRST, Fibonacci_x, Fibonacci_y, Fibonacci_x_shift, Fibonacci_y_shift, - - }; - }; - - RefVector get_unshifted() - { - return { - Fibonacci_LAST, - Fibonacci_FIRST, - Fibonacci_x, - Fibonacci_y, - - }; - }; - - RefVector get_to_be_shifted() - { - return { - Fibonacci_x, - Fibonacci_y, - - }; - }; - - RefVector get_shifted() - { - return { - Fibonacci_x_shift, - Fibonacci_y_shift, - - }; - }; - }; - - public: - class ProvingKey : public ProvingKey_, WitnessEntities> { - public: - // Expose constructors on the base class - using Base = ProvingKey_, WitnessEntities>; - using Base::Base; - - // The plookup wires that store plookup read data. - std::array get_table_column_wires() { return {}; }; - }; - - using VerificationKey = VerificationKey_>; - - using ProverPolynomials = AllEntities; - - using FoldedPolynomials = AllEntities>; - - class AllValues : public AllEntities { - public: - using Base = AllEntities; - using Base::Base; - }; - - class AllPolynomials : public AllEntities { - public: - [[nodiscard]] size_t get_polynomial_size() const { return this->Fibonacci_LAST.size(); } - [[nodiscard]] AllValues get_row(const size_t row_idx) const - { - AllValues result; - for (auto [result_field, polynomial] : zip_view(result.get_all(), get_all())) { - result_field = polynomial[row_idx]; - } - return result; - } - }; - - using RowPolynomials = AllEntities; - - class PartiallyEvaluatedMultivariates : public AllEntities { - public: - PartiallyEvaluatedMultivariates() = default; - PartiallyEvaluatedMultivariates(const size_t circuit_size) - { - // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2) - for (auto& poly : get_all()) { - poly = Polynomial(circuit_size / 2); - } - } - }; - - /** - * @brief A container for univariates used during Protogalaxy folding and sumcheck. - * @details During folding and sumcheck, the prover evaluates the relations on these univariates. 
- */ - template using ProverUnivariates = AllEntities>; - - /** - * @brief A container for univariates produced during the hot loop in sumcheck. - */ - using ExtendedEdges = ProverUnivariates; - - class CommitmentLabels : public AllEntities { - private: - using Base = AllEntities; - - public: - CommitmentLabels() - : AllEntities() - { - Base::Fibonacci_LAST = "Fibonacci_LAST"; - Base::Fibonacci_FIRST = "Fibonacci_FIRST"; - Base::Fibonacci_x = "Fibonacci_x"; - Base::Fibonacci_y = "Fibonacci_y"; - }; - }; - - class VerifierCommitments : public AllEntities { - private: - using Base = AllEntities; - - public: - VerifierCommitments(const std::shared_ptr& verification_key) - { - Fibonacci_LAST = verification_key->Fibonacci_LAST; - Fibonacci_FIRST = verification_key->Fibonacci_FIRST; - } - }; - - class Transcript : public BaseTranscript { - public: - uint32_t circuit_size; - - Commitment Fibonacci_x; - Commitment Fibonacci_y; - - std::vector> sumcheck_univariates; - std::array sumcheck_evaluations; - std::vector zm_cq_comms; - Commitment zm_cq_comm; - Commitment zm_pi_comm; - - Transcript() = default; - - Transcript(const std::vector& proof) - : BaseTranscript(proof) - {} - - void deserialize_full_transcript() - { - size_t num_bytes_read = 0; - circuit_size = deserialize_from_buffer(proof_data, num_bytes_read); - size_t log_n = numeric::get_msb(circuit_size); - - Fibonacci_x = deserialize_from_buffer(BaseTranscript::proof_data, num_bytes_read); - Fibonacci_y = deserialize_from_buffer(BaseTranscript::proof_data, num_bytes_read); - - for (size_t i = 0; i < log_n; ++i) { - sumcheck_univariates.emplace_back( - deserialize_from_buffer>( - BaseTranscript::proof_data, num_bytes_read)); - } - sumcheck_evaluations = - deserialize_from_buffer>(BaseTranscript::proof_data, num_bytes_read); - for (size_t i = 0; i < log_n; ++i) { - zm_cq_comms.push_back(deserialize_from_buffer(proof_data, num_bytes_read)); - } - zm_cq_comm = deserialize_from_buffer(proof_data, num_bytes_read); - zm_pi_comm = deserialize_from_buffer(proof_data, num_bytes_read); - } - - void serialize_full_transcript() - { - size_t old_proof_length = proof_data.size(); - BaseTranscript::proof_data.clear(); - size_t log_n = numeric::get_msb(circuit_size); - - serialize_to_buffer(circuit_size, BaseTranscript::proof_data); - - serialize_to_buffer(Fibonacci_x, BaseTranscript::proof_data); - serialize_to_buffer(Fibonacci_y, BaseTranscript::proof_data); - - for (size_t i = 0; i < log_n; ++i) { - serialize_to_buffer(sumcheck_univariates[i], BaseTranscript::proof_data); - } - serialize_to_buffer(sumcheck_evaluations, BaseTranscript::proof_data); - for (size_t i = 0; i < log_n; ++i) { - serialize_to_buffer(zm_cq_comms[i], proof_data); - } - serialize_to_buffer(zm_cq_comm, proof_data); - serialize_to_buffer(zm_pi_comm, proof_data); - - // sanity check to make sure we generate the same length of proof as before. 
- ASSERT(proof_data.size() == old_proof_length); - } - }; -}; - -} // namespace flavor -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp index c5bb2a10ab2..80d1ca652e9 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra.hpp @@ -12,6 +12,8 @@ #include "barretenberg/relations/gen_perm_sort_relation.hpp" #include "barretenberg/relations/lookup_relation.hpp" #include "barretenberg/relations/permutation_relation.hpp" +#include "barretenberg/relations/poseidon2_external_relation.hpp" +#include "barretenberg/relations/poseidon2_internal_relation.hpp" #include "barretenberg/relations/relation_parameters.hpp" #include "barretenberg/relations/ultra_arithmetic_relation.hpp" #include "barretenberg/transcript/transcript.hpp" @@ -37,10 +39,10 @@ class GoblinUltra { // The number of multivariate polynomials on which a sumcheck prover sumcheck operates (including shifts). We often // need containers of this size to hold related data, so we choose a name more agnostic than `NUM_POLYNOMIALS`. // Note: this number does not include the individual sorted list polynomials. - static constexpr size_t NUM_ALL_ENTITIES = 53; + static constexpr size_t NUM_ALL_ENTITIES = 55; // The number of polynomials precomputed to describe a circuit and to aid a prover in constructing a satisfying // assignment of witnesses. We again choose a neutral name. - static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 28; + static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 30; // The total number of witness entities not including shifts. static constexpr size_t NUM_WITNESS_ENTITIES = 14; @@ -57,7 +59,9 @@ class GoblinUltra { proof_system::EllipticRelation, proof_system::AuxiliaryRelation, proof_system::EccOpQueueRelation, - proof_system::DatabusLookupRelation>; + proof_system::DatabusLookupRelation, + proof_system::Poseidon2ExternalRelation, + proof_system::Poseidon2InternalRelation>; using Relations = Relations_; using LogDerivLookupRelation = proof_system::DatabusLookupRelation; @@ -90,40 +94,56 @@ class GoblinUltra { public: using DataType = DataType_; DEFINE_FLAVOR_MEMBERS(DataType, - q_m, // column 0 - q_c, // column 1 - q_l, // column 2 - q_r, // column 3 - q_o, // column 4 - q_4, // column 5 - q_arith, // column 6 - q_sort, // column 7 - q_elliptic, // column 8 - q_aux, // column 9 - q_lookup, // column 10 - q_busread, // column 11 - sigma_1, // column 12 - sigma_2, // column 13 - sigma_3, // column 14 - sigma_4, // column 15 - id_1, // column 16 - id_2, // column 17 - id_3, // column 18 - id_4, // column 19 - table_1, // column 20 - table_2, // column 21 - table_3, // column 22 - table_4, // column 23 - lagrange_first, // column 24 - lagrange_last, // column 25 - lagrange_ecc_op, // column 26 // indicator poly for ecc op gates - databus_id) // column 27 // id polynomial, i.e. 
id_i = i + q_m, // column 0 + q_c, // column 1 + q_l, // column 2 + q_r, // column 3 + q_o, // column 4 + q_4, // column 5 + q_arith, // column 6 + q_sort, // column 7 + q_elliptic, // column 8 + q_aux, // column 9 + q_lookup, // column 10 + q_busread, // column 11 + q_poseidon2_external, // column 12 + q_poseidon2_internal, // column 13 + sigma_1, // column 14 + sigma_2, // column 15 + sigma_3, // column 16 + sigma_4, // column 17 + id_1, // column 18 + id_2, // column 19 + id_3, // column 20 + id_4, // column 21 + table_1, // column 22 + table_2, // column 23 + table_3, // column 24 + table_4, // column 25 + lagrange_first, // column 26 + lagrange_last, // column 27 + lagrange_ecc_op, // column 28 // indicator poly for ecc op gates + databus_id // column 29 // id polynomial, i.e. id_i = i + ) static constexpr CircuitType CIRCUIT_TYPE = CircuitBuilder::CIRCUIT_TYPE; RefVector get_selectors() { - return { q_m, q_c, q_l, q_r, q_o, q_4, q_arith, q_sort, q_elliptic, q_aux, q_lookup, q_busread }; + return { q_m, + q_c, + q_l, + q_r, + q_o, + q_4, + q_arith, + q_sort, + q_elliptic, + q_aux, + q_lookup, + q_busread, + q_poseidon2_external, + q_poseidon2_internal }; }; RefVector get_sigma_polynomials() { return { sigma_1, sigma_2, sigma_3, sigma_4 }; }; RefVector get_id_polynomials() { return { id_1, id_2, id_3, id_4 }; }; @@ -237,6 +257,8 @@ class GoblinUltra { this->q_aux, this->q_lookup, this->q_busread, + this->q_poseidon2_external, + this->q_poseidon2_internal, this->sigma_1, this->sigma_2, this->sigma_3, @@ -402,6 +424,8 @@ class GoblinUltra { q_aux = "__Q_AUX"; q_lookup = "__Q_LOOKUP"; q_busread = "__Q_BUSREAD"; + q_poseidon2_external = "__Q_POSEIDON2_EXTERNAL"; + q_poseidon2_internal = "__Q_POSEIDON2_INTERNAL"; sigma_1 = "__SIGMA_1"; sigma_2 = "__SIGMA_2"; sigma_3 = "__SIGMA_3"; @@ -440,6 +464,8 @@ class GoblinUltra { this->q_aux = verification_key->q_aux; this->q_lookup = verification_key->q_lookup; this->q_busread = verification_key->q_busread; + this->q_poseidon2_external = verification_key->q_poseidon2_external; + this->q_poseidon2_internal = verification_key->q_poseidon2_internal; this->sigma_1 = verification_key->sigma_1; this->sigma_2 = verification_key->sigma_2; this->sigma_3 = verification_key->sigma_3; diff --git a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp index c9dfb4562e1..fb6e0751361 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/goblin_ultra_recursive.hpp @@ -10,15 +10,7 @@ #include "barretenberg/polynomials/polynomial.hpp" #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" -#include "barretenberg/relations/auxiliary_relation.hpp" -#include "barretenberg/relations/ecc_op_queue_relation.hpp" -#include "barretenberg/relations/elliptic_relation.hpp" -#include "barretenberg/relations/gen_perm_sort_relation.hpp" -#include "barretenberg/relations/lookup_relation.hpp" -#include "barretenberg/relations/permutation_relation.hpp" -#include "barretenberg/relations/ultra_arithmetic_relation.hpp" #include "barretenberg/srs/factories/crs_factory.hpp" -#include "barretenberg/transcript/transcript.hpp" #include #include #include @@ -54,6 +46,7 @@ template class GoblinUltraRecursive_ { using Commitment = typename Curve::Element; using CommitmentHandle = typename Curve::Element; using FF = typename Curve::ScalarField; + using 
NativeVerificationKey = flavor::GoblinUltra::VerificationKey; // Note(luke): Eventually this may not be needed at all using VerifierCommitmentKey = pcs::VerifierCommitmentKey; @@ -114,7 +107,7 @@ template class GoblinUltraRecursive_ { * @param builder * @param native_key Native verification key from which to extract the precomputed commitments */ - VerificationKey(CircuitBuilder* builder, const auto& native_key) + VerificationKey(CircuitBuilder* builder, const std::shared_ptr& native_key) : VerificationKey_>(native_key->circuit_size, native_key->num_public_inputs) { @@ -130,6 +123,8 @@ template class GoblinUltraRecursive_ { this->q_aux = Commitment::from_witness(builder, native_key->q_aux); this->q_lookup = Commitment::from_witness(builder, native_key->q_lookup); this->q_busread = Commitment::from_witness(builder, native_key->q_busread); + this->q_poseidon2_external = Commitment::from_witness(builder, native_key->q_poseidon2_external); + this->q_poseidon2_internal = Commitment::from_witness(builder, native_key->q_poseidon2_internal); this->sigma_1 = Commitment::from_witness(builder, native_key->sigma_1); this->sigma_2 = Commitment::from_witness(builder, native_key->sigma_2); this->sigma_3 = Commitment::from_witness(builder, native_key->sigma_3); diff --git a/barretenberg/cpp/src/barretenberg/flavor/toy_avm.hpp b/barretenberg/cpp/src/barretenberg/flavor/toy_avm.hpp new file mode 100644 index 00000000000..ba0c7c2b465 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/flavor/toy_avm.hpp @@ -0,0 +1,376 @@ +#pragma once +#include "barretenberg/commitment_schemes/commitment_key.hpp" +#include "barretenberg/commitment_schemes/kzg/kzg.hpp" +#include "barretenberg/ecc/curves/bn254/bn254.hpp" +#include "barretenberg/flavor/flavor.hpp" +#include "barretenberg/flavor/flavor_macros.hpp" +#include "barretenberg/polynomials/univariate.hpp" +#include "barretenberg/relations/relation_parameters.hpp" +#include "barretenberg/relations/relation_types.hpp" +#include "barretenberg/relations/toy_avm/generic_permutation_relation.hpp" +#include "barretenberg/relations/toy_avm/relation_definer.hpp" +#include "relation_definitions_fwd.hpp" +#include +#include +#include +#include +#include +#include + +// NOLINTBEGIN(cppcoreguidelines-avoid-const-or-ref-data-members) + +namespace proof_system::honk { +namespace flavor { + +/** + * @brief This class provides an example flavor for using GenericPermutationRelations with various settings to make + * integrating those mechanisms into AVM easier + * + */ +class ToyAVM { + public: + using Curve = curve::BN254; + using FF = Curve::ScalarField; + using GroupElement = Curve::Element; + using Commitment = Curve::AffineElement; + using CommitmentHandle = Curve::AffineElement; + using PCS = pcs::kzg::KZG; + using Polynomial = barretenberg::Polynomial; + using PolynomialHandle = std::span; + using CommitmentKey = pcs::CommitmentKey; + using VerifierCommitmentKey = pcs::VerifierCommitmentKey; + + // The number of wires is 5. The set of tuples (permutation_set_column_1,permutation_set_column_2) should be + // equivalent to (permutation_set_column_3, permutation_set_column_4) and the self_permutation_column contains 2 + // subsets which are permutations of each other + static constexpr size_t NUM_WIRES = 5; + + // The number of multivariate polynomials on which a sumcheck prover sumcheck operates (including shifts). We often + // need containers of this size to hold related data, so we choose a name more agnostic than `NUM_POLYNOMIALS`. 
+ // Note: this number does not include the individual sorted list polynomials. + static constexpr size_t NUM_ALL_ENTITIES = 12; + // The number of polynomials precomputed to describe a circuit and to aid a prover in constructing a satisfying + // assignment of witnesses. We again choose a neutral name. + static constexpr size_t NUM_PRECOMPUTED_ENTITIES = 5; + // The total number of witness entities not including shifts. + static constexpr size_t NUM_WITNESS_ENTITIES = 7; + + // define the tuple of Relations that comprise the Sumcheck relation + using Relations = std::tuple>; + + static constexpr size_t MAX_PARTIAL_RELATION_LENGTH = compute_max_partial_relation_length(); + + // BATCHED_RELATION_PARTIAL_LENGTH = algebraic degree of sumcheck relation *after* multiplying by the `pow_zeta` + // random polynomial e.g. For \sum(x) [A(x) * B(x) + C(x)] * PowZeta(X), relation length = 2 and random relation + // length = 3 + static constexpr size_t BATCHED_RELATION_PARTIAL_LENGTH = MAX_PARTIAL_RELATION_LENGTH + 1; + static constexpr size_t NUM_RELATIONS = std::tuple_size::value; + + // Instantiate the BarycentricData needed to extend each Relation Univariate + + // define the containers for storing the contributions from each relation in Sumcheck + using SumcheckTupleOfTuplesOfUnivariates = decltype(create_sumcheck_tuple_of_tuples_of_univariates()); + using TupleOfArraysOfValues = decltype(create_tuple_of_arrays_of_values()); + + private: + /** + * @brief A base class labelling precomputed entities and (ordered) subsets of interest. + * @details Used to build the proving key and verification key. + */ + template class PrecomputedEntities : public PrecomputedEntitiesBase { + public: + using DataType = DataType_; + DEFINE_FLAVOR_MEMBERS(DataType, + lagrange_first, // column 0 + enable_tuple_set_permutation, // column 1 + enable_single_column_permutation, // column 2 + enable_first_set_permutation, // column 3 + enable_second_set_permutation) // column 4 + + RefVector get_selectors() + { + return { lagrange_first, + enable_tuple_set_permutation, + enable_single_column_permutation, + enable_first_set_permutation, + enable_second_set_permutation }; + }; + RefVector get_sigma_polynomials() { return {}; }; + RefVector get_id_polynomials() { return {}; }; + RefVector get_table_polynomials() { return {}; }; + }; + + /** + * @brief Container for all witness polynomials used/constructed by the prover. + * @details Shifts are not included here since they do not occupy their own memory. + */ + + template class WitnessEntities { + public: + DEFINE_FLAVOR_MEMBERS(DataType, + permutation_set_column_1, // Column 0 + permutation_set_column_2, // Column 1 + permutation_set_column_3, // Column 2 + permutation_set_column_4, // Column 3 + self_permutation_column, // Column 4 + tuple_permutation_inverses, // Column 5 + single_permutation_inverses) // Column 6 + + RefVector get_wires() + { + return { permutation_set_column_1, + permutation_set_column_2, + permutation_set_column_3, + permutation_set_column_4, + self_permutation_column }; + }; + };
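A quick consistency note on the counts above: ToyAVM defines no shifted entities (get_to_be_shifted() below returns {}), so the precomputed and witness counts must add up to NUM_ALL_ENTITIES. A compile-time check one could add here, as a sketch (not part of the diff):

    static_assert(NUM_PRECOMPUTED_ENTITIES + NUM_WITNESS_ENTITIES == NUM_ALL_ENTITIES,
                  "ToyAVM has no shifts, so precomputed + witness must equal all entities"); // 5 + 7 == 12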
+ + /** + * @brief A base class labelling all entities (for instance, all of the polynomials used by the prover during + * sumcheck) in this Honk variant along with particular subsets of interest + * @details Used to build containers for: the prover's polynomial during sumcheck; the sumcheck's folded + * polynomials; the univariates constructed during sumcheck; the evaluations produced by sumcheck. + * + * Symbolically we have: AllEntities = PrecomputedEntities + WitnessEntities + "ShiftedEntities". It could be + * implemented as such, but we have this now. + */ + + template class AllEntities { + public: + DEFINE_FLAVOR_MEMBERS(DataType, + lagrange_first, // Column 0 + enable_tuple_set_permutation, // Column 1 + enable_single_column_permutation, // Column 2 + enable_first_set_permutation, // Column 3 + enable_second_set_permutation, // Column 4 + permutation_set_column_1, // Column 5 + permutation_set_column_2, // Column 6 + permutation_set_column_3, // Column 7 + permutation_set_column_4, // Column 8 + self_permutation_column, // Column 9 + tuple_permutation_inverses, // Column 10 + single_permutation_inverses) // Column 11 + + RefVector get_wires() + { + return { + permutation_set_column_1, permutation_set_column_2, permutation_set_column_3, permutation_set_column_4 + }; + }; + RefVector get_unshifted() + { + return { lagrange_first, + enable_tuple_set_permutation, + enable_single_column_permutation, + enable_first_set_permutation, + enable_second_set_permutation, + permutation_set_column_1, + permutation_set_column_2, + permutation_set_column_3, + permutation_set_column_4, + self_permutation_column, + tuple_permutation_inverses, + single_permutation_inverses }; + }; + RefVector get_to_be_shifted() { return {}; }; + RefVector get_shifted() { return {}; }; + }; + + public: + /** + * @brief The proving key is responsible for storing the polynomials used by the prover. + * @note TODO(Cody): Maybe multiple inheritance is the right thing here. In that case, nothing should ever inherit + * from ProvingKey. + */ + class ProvingKey : public ProvingKey_, WitnessEntities> { + public: + // Expose constructors on the base class + using Base = ProvingKey_, WitnessEntities>; + using Base::Base; + + // The plookup wires that store plookup read data. + std::array get_table_column_wires() { return {}; }; + }; + + /** + * @brief The verification key is responsible for storing the commitments to the precomputed (non-witness) + * polynomials used by the verifier. + * + * @note Note the discrepancy with what sort of data is stored here vs in the proving key. We may want to resolve + * that, and split out separate PrecomputedPolynomials/Commitments data for clarity but also for portability of our + * circuits. + */ + using VerificationKey = VerificationKey_>; + + /** + * @brief A field element for each entity of the flavor. These entities represent the prover polynomials evaluated + * at one point. + */ + class AllValues : public AllEntities { + public: + using Base = AllEntities; + using Base::Base; + };
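For concreteness, the row-access pattern that AllValues and the polynomial containers below support looks roughly like this (a hypothetical driver, mirroring the check_circuit pattern used by the circuit builders elsewhere in this codebase):

    AllPolynomials polys = compute_polynomials();            // hypothetical source of the columns
    for (size_t i = 0; i < polys.get_polynomial_size(); ++i) {
        AllValues row = polys.get_row(i);                    // one field element per entity
        // evaluate each relation on `row`; every subrelation should accumulate to zero
    }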
+ + /** + * @brief An owning container of polynomials. + * @warning When this was introduced it broke some of our design principles. + * - Execution trace builders don't handle "polynomials" because the interpretation of the execution trace + * columns as polynomials is a detail of the proving system, and trace builders are (sometimes in practice, + * always in principle) reusable for different proving protocols (e.g., Plonk and Honk). + * - Polynomial storage is handled by key classes. Polynomials aren't moved, but are accessed elsewhere by + * std::spans. + * + * We will consider revising this data model: TODO(https://github.com/AztecProtocol/barretenberg/issues/743) + */ + class AllPolynomials : public AllEntities { + public: + [[nodiscard]] size_t get_polynomial_size() const { return this->lagrange_first.size(); } + AllValues get_row(const size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), this->get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + /** + * @brief A container for polynomial handles; only stores spans. + */ + class ProverPolynomials : public AllEntities { + public: + [[nodiscard]] size_t get_polynomial_size() const { return enable_tuple_set_permutation.size(); } + [[nodiscard]] AllValues get_row(const size_t row_idx) const + { + AllValues result; + for (auto [result_field, polynomial] : zip_view(result.get_all(), get_all())) { + result_field = polynomial[row_idx]; + } + return result; + } + }; + + /** + * @brief A container for storing the partially evaluated multivariates produced by sumcheck. + */ + class PartiallyEvaluatedMultivariates : public AllEntities { + + public: + PartiallyEvaluatedMultivariates() = default; + PartiallyEvaluatedMultivariates(const size_t circuit_size) + { + // Storage is only needed after the first partial evaluation, hence polynomials of size (n / 2) + for (auto& poly : this->get_all()) { + poly = Polynomial(circuit_size / 2); + } + } + }; + /** + * @brief A container for univariates used during Protogalaxy folding and sumcheck. + * @details During folding and sumcheck, the prover evaluates the relations on these univariates. + */ + template using ProverUnivariates = AllEntities>; + + /** + * @brief A container for univariates produced during the hot loop in sumcheck. + */ + using ExtendedEdges = ProverUnivariates; + + /** + * @brief A container for the witness commitments. + */ + + using WitnessCommitments = WitnessEntities; + + /** + * @brief A container for commitment labels. + * @note It's debatable whether this should inherit from AllEntities, since most entries are not strictly needed. It + * has, however, been useful during debugging to have these labels available.
+ * + */ + class CommitmentLabels : public AllEntities { + private: + using Base = AllEntities; + + public: + CommitmentLabels() + : AllEntities() + { + Base::permutation_set_column_1 = "PERMUTATION_SET_COLUMN_1"; + Base::permutation_set_column_2 = "PERMUTATION_SET_COLUMN_2"; + Base::permutation_set_column_3 = "PERMUTATION_SET_COLUMN_3"; + Base::permutation_set_column_4 = "PERMUTATION_SET_COLUMN_4"; + Base::self_permutation_column = "SELF_PERMUTATION_COLUMN"; + Base::tuple_permutation_inverses = "TUPLE_PERMUTATION_INVERSES"; + Base::single_permutation_inverses = "SINGLE_PERMUTATION_INVERSES"; + // The ones beginning with "__" are only used for debugging + Base::lagrange_first = "__LAGRANGE_FIRST"; + Base::enable_tuple_set_permutation = "__ENABLE_SET_PERMUTATION"; + Base::enable_single_column_permutation = "__ENABLE_SINGLE_COLUMN_PERMUTATION"; + Base::enable_first_set_permutation = "__ENABLE_FIRST_SET_PERMUTATION"; + Base::enable_second_set_permutation = "__ENABLE_SECOND_SET_PERMUTATION"; + }; + }; + + class VerifierCommitments : public AllEntities { + + public: + VerifierCommitments(const std::shared_ptr& verification_key) + { + lagrange_first = verification_key->lagrange_first; + enable_tuple_set_permutation = verification_key->enable_tuple_set_permutation; + enable_single_column_permutation = verification_key->enable_single_column_permutation; + enable_first_set_permutation = verification_key->enable_first_set_permutation; + enable_second_set_permutation = verification_key->enable_second_set_permutation; + } + }; + + /** + * @brief Derived class that defines proof structure for ToyAVM proofs, as well as supporting functions. + * + */ + class Transcript : public BaseTranscript { + public: + uint32_t circuit_size; + Commitment column_0_comm; + Commitment column_1_comm; + Commitment permutation_inverses_comm; + std::vector> sumcheck_univariates; + std::array sumcheck_evaluations; + + std::vector zm_cq_comms; + Commitment zm_cq_comm; + Commitment zm_pi_comm; + + Transcript() = default; + + Transcript(const std::vector& proof) + : BaseTranscript(proof) + {} + + void deserialize_full_transcript() + { + // TODO. Codepath is dead for now, because there is no composer + abort(); + // take the current proof elements and put them into the struct + } + + void serialize_full_transcript() + { + // TODO.
Codepath is dead for now, because there is no composer + abort(); + } + }; +}; + +// NOLINTEND(cppcoreguidelines-avoid-const-or-ref-data-members) + +} // namespace flavor +namespace sumcheck { + +DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(GenericPermutationRelationImpl, flavor::ToyAVM) + +} // namespace sumcheck +} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp b/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp index 6caba7a2294..df0ac96c120 100644 --- a/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp +++ b/barretenberg/cpp/src/barretenberg/flavor/ultra_recursive.hpp @@ -54,6 +54,7 @@ template class UltraRecursive_ { using Commitment = typename Curve::Element; using CommitmentHandle = typename Curve::Element; using FF = typename Curve::ScalarField; + using NativeVerificationKey = flavor::Ultra::VerificationKey; // Note(luke): Eventually this may not be needed at all using VerifierCommitmentKey = pcs::VerifierCommitmentKey; @@ -250,7 +251,7 @@ template class UltraRecursive_ { * @param builder * @param native_key Native verification key from which to extract the precomputed commitments */ - VerificationKey(CircuitBuilder* builder, auto native_key) + VerificationKey(CircuitBuilder* builder, const std::shared_ptr& native_key) : VerificationKey_>(native_key->circuit_size, native_key->num_public_inputs) { this->q_m = Commitment::from_witness(builder, native_key->q_m); diff --git a/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt index 248b05c02e6..a6c3c61383a 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/goblin/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(goblin ultra_honk eccvm translator_vm) \ No newline at end of file +barretenberg_module(goblin ultra_honk eccvm translator_vm transcript) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp b/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp index 09563987010..7503307881a 100644 --- a/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/goblin/full_goblin_composer.test.cpp @@ -1,4 +1,6 @@ #include "barretenberg/eccvm/eccvm_composer.hpp" +#include "barretenberg/goblin/goblin.hpp" +#include "barretenberg/goblin/mock_circuits.hpp" #include "barretenberg/goblin/translation_evaluations.hpp" #include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp" #include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" @@ -8,14 +10,11 @@ #include +using namespace barretenberg; using namespace proof_system::honk; namespace test_full_goblin_composer { -namespace { -auto& engine = numeric::random::get_debug_engine(); -} - class FullGoblinComposerTests : public ::testing::Test { protected: static void SetUpTestSuite() @@ -30,110 +29,11 @@ class FullGoblinComposerTests : public ::testing::Test { using Point = Curve::AffineElement; using CommitmentKey = pcs::CommitmentKey; using OpQueue = proof_system::ECCOpQueue; + using GoblinUltraBuilder = proof_system::GoblinUltraCircuitBuilder; using ECCVMFlavor = flavor::ECCVM; using ECCVMBuilder = proof_system::ECCVMCircuitBuilder; using ECCVMComposer = ECCVMComposer_; - - static constexpr size_t NUM_OP_QUEUE_COLUMNS = flavor::GoblinUltra::NUM_WIRES; - - /** - * @brief Generate a simple test circuit with some ECC op gates and 
conventional arithmetic gates - * - * @param builder - */ - static void generate_test_circuit(proof_system::GoblinUltraCircuitBuilder& builder) - { - // Add some arbitrary ecc op gates - for (size_t i = 0; i < 3; ++i) { - auto point = Point::random_element(); - auto scalar = FF::random_element(); - builder.queue_ecc_add_accum(point); - builder.queue_ecc_mul_accum(point, scalar); - } - // queues the result of the preceding ECC - builder.queue_ecc_eq(); // should be eq and reset - - // Add some conventional gates that utilize public inputs - for (size_t i = 0; i < 10; ++i) { - FF a = FF::random_element(); - FF b = FF::random_element(); - FF c = FF::random_element(); - FF d = a + b + c; - uint32_t a_idx = builder.add_public_variable(a); - uint32_t b_idx = builder.add_variable(b); - uint32_t c_idx = builder.add_variable(c); - uint32_t d_idx = builder.add_variable(d); - - builder.create_big_add_gate({ a_idx, b_idx, c_idx, d_idx, FF(1), FF(1), FF(1), FF(-1), FF(0) }); - } - } - - /** - * @brief Mock the interactions of a simple curcuit with the op_queue - * @details The transcript aggregation protocol in the Goblin proof system can not yet support an empty "previous - * transcript" (see issue #723). This function mocks the interactions with the op queue of a fictional "first" - * circuit. This way, when we go to generate a proof over our first "real" circuit, the transcript aggregation - * protocol can proceed nominally. The mock data is valid in the sense that it can be processed by all stages of - * Goblin as if it came from a genuine circuit. - * - * @todo WOKTODO: this is a zero commitments issue - * - * @param op_queue - */ - static void perform_op_queue_interactions_for_mock_first_circuit( - std::shared_ptr& op_queue) - { - proof_system::GoblinUltraCircuitBuilder builder{ op_queue }; - - // Add a mul accum op and an equality op - auto point = Point::one() * FF::random_element(); - auto scalar = FF::random_element(); - builder.queue_ecc_mul_accum(point, scalar); - builder.queue_ecc_eq(); - - op_queue->set_size_data(); - - // Manually compute the op queue transcript commitments (which would normally be done by the prover) - auto crs_factory_ = barretenberg::srs::get_crs_factory(); - auto commitment_key = CommitmentKey(op_queue->get_current_size(), crs_factory_); - std::array op_queue_commitments; - size_t idx = 0; - for (auto& entry : op_queue->get_aggregate_transcript()) { - op_queue_commitments[idx++] = commitment_key.commit(entry); - } - // Store the commitment data for use by the prover of the next circuit - op_queue->set_commitment_data(op_queue_commitments); - } - - /** - * @brief Construct and a verify a Honk proof - * - */ - static bool construct_and_verify_honk_proof(GoblinUltraComposer& composer, - proof_system::GoblinUltraCircuitBuilder& builder) - { - auto instance = composer.create_instance(builder); - auto prover = composer.create_prover(instance); - auto verifier = composer.create_verifier(instance); - auto proof = prover.construct_proof(); - bool verified = verifier.verify_proof(proof); - - return verified; - } - - /** - * @brief Construct and verify a Goblin ECC op queue merge proof - * - */ - static bool construct_and_verify_merge_proof(GoblinUltraComposer& composer, std::shared_ptr& op_queue) - { - auto merge_prover = composer.create_merge_prover(op_queue); - auto merge_verifier = composer.create_merge_verifier(/*srs_size=*/10); - auto merge_proof = merge_prover.construct_proof(); - bool verified = merge_verifier.verify_proof(merge_proof); - - return verified; - } + using 
KernelInput = Goblin::AccumulationOutput; }; /** @@ -145,52 +45,23 @@ class FullGoblinComposerTests : public ::testing::Test { */ TEST_F(FullGoblinComposerTests, SimpleCircuit) { - auto op_queue = std::make_shared(); - - // Add mock data to op queue to simulate interaction with a "first" circuit - perform_op_queue_interactions_for_mock_first_circuit(op_queue); + barretenberg::Goblin goblin; + GoblinUltraBuilder initial_circuit{ goblin.op_queue }; + GoblinTestingUtils::construct_simple_initial_circuit(initial_circuit); + KernelInput kernel_input = goblin.accumulate(initial_circuit); // Construct a series of simple Goblin circuits; generate and verify their proofs - size_t NUM_CIRCUITS = 3; + size_t NUM_CIRCUITS = 2; for (size_t circuit_idx = 0; circuit_idx < NUM_CIRCUITS; ++circuit_idx) { - proof_system::GoblinUltraCircuitBuilder builder{ op_queue }; - - generate_test_circuit(builder); - - // The same composer is used to manage Honk and Merge prover/verifier - proof_system::honk::GoblinUltraComposer composer; - - // Construct and verify Ultra Goblin Honk proof - bool honk_verified = construct_and_verify_honk_proof(composer, builder); - EXPECT_TRUE(honk_verified); - - // Construct and verify op queue merge proof - bool merge_verified = construct_and_verify_merge_proof(composer, op_queue); - EXPECT_TRUE(merge_verified); + GoblinUltraBuilder circuit_builder{ goblin.op_queue }; + GoblinTestingUtils::construct_arithmetic_circuit(circuit_builder); + kernel_input = goblin.accumulate(circuit_builder); } - // Execute the ECCVM - // TODO(https://github.com/AztecProtocol/barretenberg/issues/785) Properly initialize transcript - auto eccvm_builder = ECCVMBuilder(op_queue); - auto eccvm_composer = ECCVMComposer(); - auto eccvm_prover = eccvm_composer.create_prover(eccvm_builder); - auto eccvm_verifier = eccvm_composer.create_verifier(eccvm_builder); - auto eccvm_proof = eccvm_prover.construct_proof(); - bool eccvm_verified = eccvm_verifier.verify_proof(eccvm_proof); - EXPECT_TRUE(eccvm_verified); - - // Execute the Translator - // TODO(https://github.com/AztecProtocol/barretenberg/issues/786) Properly derive batching_challenge - auto batching_challenge = Fbase::random_element(); - auto evaluation_input = eccvm_prover.evaluation_challenge_x; - proof_system::GoblinTranslatorCircuitBuilder translator_builder{ batching_challenge, evaluation_input, op_queue }; - GoblinTranslatorComposer translator_composer; - GoblinTranslatorProver translator_prover = translator_composer.create_prover(translator_builder); - GoblinTranslatorVerifier translator_verifier = translator_composer.create_verifier(translator_builder); - proof_system::plonk::proof translator_proof = translator_prover.construct_proof(); - bool accumulator_construction_verified = translator_verifier.verify_proof(translator_proof); - bool translation_verified = translator_verifier.verify_translation(eccvm_prover.translation_evaluations); - EXPECT_TRUE(accumulator_construction_verified && translation_verified); + Goblin::Proof proof = goblin.prove(); + bool verified = goblin.verify(proof); + EXPECT_TRUE(verified); } + // TODO(https://github.com/AztecProtocol/barretenberg/issues/787) Expand these tests. 
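For orientation, the full flow the rewritten test now exercises, condensed into a minimal sketch (using the Goblin and GoblinTestingUtils APIs introduced in goblin.hpp and mock_circuits.hpp below; a sketch, not an additional test):

    barretenberg::Goblin goblin;
    GoblinUltraBuilder builder{ goblin.op_queue };
    GoblinTestingUtils::construct_simple_initial_circuit(builder);
    auto kernel_input = goblin.accumulate(builder);  // per circuit: Goblin Ultra Honk proof + merge proof
    Goblin::Proof proof = goblin.prove();            // ECCVM proof, translator proof, translation evaluations
    EXPECT_TRUE(goblin.verify(proof));               // verifies ECCVM, translator, and translation claims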
} // namespace test_full_goblin_composer diff --git a/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp new file mode 100644 index 00000000000..315b66657f3 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/goblin/goblin.hpp @@ -0,0 +1,106 @@ +#pragma once + +#include "barretenberg/eccvm/eccvm_composer.hpp" +#include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" +#include "barretenberg/translator_vm/goblin_translator_composer.hpp" +#include "barretenberg/ultra_honk/ultra_composer.hpp" + +namespace barretenberg { + +class Goblin { + using HonkProof = proof_system::plonk::proof; + + public: + /** + * @brief Output of goblin::accumulate; an Ultra proof and the corresponding verification key + * + */ + struct AccumulationOutput { + using NativeVerificationKey = proof_system::honk::flavor::GoblinUltra::VerificationKey; + HonkProof proof; + std::shared_ptr verification_key; + }; + + struct Proof { + HonkProof eccvm_proof; + HonkProof translator_proof; + TranslationEvaluations translation_evaluations; + }; + + using Fr = barretenberg::fr; + using Fq = barretenberg::fq; + + using Transcript = proof_system::honk::BaseTranscript; + using GoblinUltraComposer = proof_system::honk::GoblinUltraComposer; + using GoblinUltraCircuitBuilder = proof_system::GoblinUltraCircuitBuilder; + using OpQueue = proof_system::ECCOpQueue; + using ECCVMFlavor = proof_system::honk::flavor::ECCVM; + using ECCVMBuilder = proof_system::ECCVMCircuitBuilder; + using ECCVMComposer = proof_system::honk::ECCVMComposer; + using TranslatorBuilder = proof_system::GoblinTranslatorCircuitBuilder; + using TranslatorComposer = proof_system::honk::GoblinTranslatorComposer; + + std::shared_ptr op_queue = std::make_shared(); + + private: + // TODO(https://github.com/AztecProtocol/barretenberg/issues/798) unique_ptr use is a hack + std::unique_ptr eccvm_builder; + std::unique_ptr translator_builder; + std::unique_ptr eccvm_composer; + std::unique_ptr translator_composer; + + public: + /** + * @brief Construct a Goblin Ultra Honk proof for the given circuit, along with a merge proof for its + * contribution to the ECC op queue + * + * @param circuit_builder + */ + AccumulationOutput accumulate(GoblinUltraCircuitBuilder& circuit_builder) + { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/797) Complete the "kernel" logic by recursively + // verifying previous merge proof + + GoblinUltraComposer composer; + auto instance = composer.create_instance(circuit_builder); + auto prover = composer.create_prover(instance); + auto ultra_proof = prover.construct_proof(); + + auto merge_prover = composer.create_merge_prover(op_queue); + [[maybe_unused]] auto merge_proof = merge_prover.construct_proof(); + + return { ultra_proof, instance->verification_key }; + }; + + Proof prove() + { + Proof proof; + eccvm_builder = std::make_unique(op_queue); + eccvm_composer = std::make_unique(); + auto eccvm_prover = eccvm_composer->create_prover(*eccvm_builder); + proof.eccvm_proof = eccvm_prover.construct_proof(); + proof.translation_evaluations = eccvm_prover.translation_evaluations; + + translator_builder = std::make_unique( + eccvm_prover.translation_batching_challenge_v, eccvm_prover.evaluation_challenge_x, op_queue); + translator_composer = std::make_unique(); + auto translator_prover = translator_composer->create_prover(*translator_builder, eccvm_prover.transcript); + proof.translator_proof =
translator_prover.construct_proof(); + return proof; + }; + + bool verify(const Proof& proof) + { + auto eccvm_verifier = eccvm_composer->create_verifier(*eccvm_builder); + bool eccvm_verified = eccvm_verifier.verify_proof(proof.eccvm_proof); + + auto translator_verifier = translator_composer->create_verifier(*translator_builder, eccvm_verifier.transcript); + bool accumulator_construction_verified = translator_verifier.verify_proof(proof.translator_proof); + // TODO(https://github.com/AztecProtocol/barretenberg/issues/799): + // Ensure translation_evaluations are passed correctly + bool translation_verified = translator_verifier.verify_translation(proof.translation_evaluations); + return eccvm_verified && accumulator_construction_verified && translation_verified; + }; +}; +} // namespace barretenberg \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp new file mode 100644 index 00000000000..cb12bb08c37 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/goblin/mock_circuits.hpp @@ -0,0 +1,101 @@ +#include "barretenberg/commitment_schemes/commitment_key.hpp" +#include "barretenberg/eccvm/eccvm_composer.hpp" +#include "barretenberg/goblin/goblin.hpp" +#include "barretenberg/goblin/translation_evaluations.hpp" +#include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp" +#include "barretenberg/translator_vm/goblin_translator_composer.hpp" +#include "barretenberg/ultra_honk/ultra_composer.hpp" + +namespace barretenberg { +class GoblinTestingUtils { + public: + using Curve = curve::BN254; + using FF = Curve::ScalarField; + using Fbase = Curve::BaseField; + using Point = Curve::AffineElement; + using CommitmentKey = proof_system::honk::pcs::CommitmentKey; + using OpQueue = proof_system::ECCOpQueue; + using GoblinUltraBuilder = proof_system::GoblinUltraCircuitBuilder; + using Flavor = proof_system::honk::flavor::GoblinUltra; + static constexpr size_t NUM_OP_QUEUE_COLUMNS = Flavor::NUM_WIRES; + + static void construct_arithmetic_circuit(GoblinUltraBuilder& builder) + { + // Add some arithmetic gates that utilize public inputs + for (size_t i = 0; i < 10; ++i) { + FF a = FF::random_element(); + FF b = FF::random_element(); + FF c = FF::random_element(); + FF d = a + b + c; + uint32_t a_idx = builder.add_public_variable(a); + uint32_t b_idx = builder.add_variable(b); + uint32_t c_idx = builder.add_variable(c); + uint32_t d_idx = builder.add_variable(d); + + // Encodes 1*a + 1*b + 1*c - 1*d + 0 = 0, i.e. the constraint d = a + b + c + builder.create_big_add_gate({ a_idx, b_idx, c_idx, d_idx, FF(1), FF(1), FF(1), FF(-1), FF(0) }); + } + } + + /** + * @brief Mock the interactions of a simple circuit with the op_queue + * @todo The transcript aggregation protocol in the Goblin proof system cannot yet support an empty "previous + * transcript" (see issue #723) because the corresponding commitments are zero / the point at infinity. This + * function mocks the interactions with the op queue of a fictional "first" circuit. This way, when we go to + * generate a proof over our first "real" circuit, the transcript aggregation protocol can proceed nominally. The + * mock data is valid in the sense that it can be processed by all stages of Goblin as if it came from a genuine + * circuit.
+ * + * + * @param op_queue + */ + static void perform_op_queue_interactions_for_mock_first_circuit( + std::shared_ptr& op_queue) + { + proof_system::GoblinUltraCircuitBuilder builder{ op_queue }; + + // Add a mul accum op and an equality op + auto point = Point::one() * FF::random_element(); + auto scalar = FF::random_element(); + builder.queue_ecc_mul_accum(point, scalar); + builder.queue_ecc_eq(); + + op_queue->set_size_data(); + + // Manually compute the op queue transcript commitments (which would normally be done by the merge prover) + auto crs_factory_ = barretenberg::srs::get_crs_factory(); + auto commitment_key = CommitmentKey(op_queue->get_current_size(), crs_factory_); + std::array op_queue_commitments; + size_t idx = 0; + for (auto& entry : op_queue->get_aggregate_transcript()) { + op_queue_commitments[idx++] = commitment_key.commit(entry); + } + // Store the commitment data for use by the prover of the next circuit + op_queue->set_commitment_data(op_queue_commitments); + } + + /** + * @brief Generate a simple test circuit with some ECC op gates and conventional arithmetic gates + * + * @param builder + */ + static void construct_simple_initial_circuit(GoblinUltraBuilder& builder) + { + // TODO(https://github.com/AztecProtocol/barretenberg/issues/800) Testing cleanup + perform_op_queue_interactions_for_mock_first_circuit(builder.op_queue); + + // Add some arbitrary ecc op gates + for (size_t i = 0; i < 3; ++i) { + auto point = Point::random_element(); + auto scalar = FF::random_element(); + builder.queue_ecc_add_accum(point); + builder.queue_ecc_mul_accum(point, scalar); + } + // queues the result of the preceding ECC + builder.queue_ecc_eq(); // should be eq and reset + + construct_arithmetic_circuit(builder); + } +}; +} // namespace barretenberg \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/honk/proof_system/lookup_library.hpp b/barretenberg/cpp/src/barretenberg/honk/proof_system/logderivative_library.hpp similarity index 58% rename from barretenberg/cpp/src/barretenberg/honk/proof_system/lookup_library.hpp rename to barretenberg/cpp/src/barretenberg/honk/proof_system/logderivative_library.hpp index 820bc2907a5..4a86cf74075 100644 --- a/barretenberg/cpp/src/barretenberg/honk/proof_system/lookup_library.hpp +++ b/barretenberg/cpp/src/barretenberg/honk/proof_system/logderivative_library.hpp @@ -1,7 +1,7 @@ #pragma once #include -namespace proof_system::honk::lookup_library { +namespace proof_system::honk::logderivative_library { /** * @brief Compute the inverse polynomial I(X) required for logderivative lookups @@ -29,12 +29,12 @@ void compute_logderivative_inverse(Polynomials& polynomials, auto& relation_para using Accumulator = typename Relation::ValueAccumulator0; constexpr size_t READ_TERMS = Relation::READ_TERMS; constexpr size_t WRITE_TERMS = Relation::WRITE_TERMS; - auto& inverse_polynomial = polynomials.lookup_inverses; auto lookup_relation = Relation(); + auto& inverse_polynomial = lookup_relation.template get_inverse_polynomial(polynomials); for (size_t i = 0; i < circuit_size; ++i) { auto row = polynomials.get_row(i); - bool has_inverse = lookup_relation.lookup_exists_at_row(row); + bool has_inverse = lookup_relation.operation_exists_at_row(row); if (!has_inverse) { continue; } @@ -97,7 +97,7 @@ void accumulate_logderivative_lookup_subrelation_contributions(ContainerOverSubr using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; using View = typename Accumulator::View; - auto lookup_inverses = 
View(in.lookup_inverses); + auto lookup_inverses = View(lookup_relation.template get_inverse_polynomial(in)); constexpr size_t NUM_TOTAL_TERMS = READ_TERMS + WRITE_TERMS; std::array lookup_terms; @@ -153,4 +153,98 @@ void accumulate_logderivative_lookup_subrelation_contributions(ContainerOverSubr }); } -} // namespace proof_system::honk::lookup_library \ No newline at end of file +/** + * @brief Compute generic log-derivative set permutation subrelation accumulation + * @details The generic log-derivative set permutation relation consists of two subrelations. The first demonstrates that the + * inverse polynomial I, defined via I = 1/[(read_term) * (write_term)], has been computed correctly. The second + * establishes the correctness of the permutation itself based on the log-derivative argument. Note that the + * latter subrelation is "linearly dependent" in the sense that it establishes that a sum across all rows of the + * execution trace is zero, rather than that some expression holds independently at each row. Accordingly, this + * subrelation is not multiplied by a scaling factor at each accumulation step. The subrelation expressions are + * respectively: + * + * I * (read_term) * (write_term) - q_{permutation_enabler} = 0 + * + * \sum_{i=0}^{n-1} [q_{write_enabler} * I * write_term + q_{read_enabler} * I * read_term] = 0 + * + * The explicit expressions for read_term and write_term are dependent upon the particular structure of the permutation + * being performed and methods for computing them must be defined in the corresponding relation class. The entities + * which are used to determine the use of permutation (is it enabled, is the first "read" set enabled, is the second + * "write" set enabled) must be defined in the relation class. + * + * @tparam FF + * @tparam Relation + * @tparam ContainerOverSubrelations + * @tparam AllEntities + * @tparam Parameters + * @param accumulator + * @param in + * @param params + * @param scaling_factor + */
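A worked example of the batched-inverse trick the function below implements, with small integers standing in for field elements: suppose at some row read_term r = 3 and write_term w = 5. The prover supplies the single committed value

    I = 1 / (r * w) = 1 / 15

and the first subrelation checks I * r * w - inverse_exists == 0. Both individual inverses are then recovered without any further field inversions,

    1 / r = I * w    and    1 / w = I * r,

which is exactly what the denominator_accumulator manipulation below computes in the general READ_TERMS/WRITE_TERMS setting.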
+template +void accumulate_logderivative_permutation_subrelation_contributions(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor) +{ + constexpr size_t READ_TERMS = Relation::READ_TERMS; + constexpr size_t WRITE_TERMS = Relation::WRITE_TERMS; + + // For now we only do simple permutations over tuples with 1 read and 1 write term + static_assert(READ_TERMS == 1); + static_assert(WRITE_TERMS == 1); + + auto permutation_relation = Relation(); + + using Accumulator = typename std::tuple_element_t<0, ContainerOverSubrelations>; + using View = typename Accumulator::View; + + auto permutation_inverses = View(permutation_relation.template get_inverse_polynomial(in)); + + constexpr size_t NUM_TOTAL_TERMS = 2; + std::array permutation_terms; + std::array denominator_accumulator; + + // The permutation relation = 1 / read_term - 1 / write_term + // To get the inverses (1 / read_term), (1 / write_term), we have a commitment to the product of inverses + // i.e. permutation_inverses = (1 / read_term) * (1 / write_term) + // The purpose of this next section is to derive individual inverse terms using `permutation_inverses` + // i.e. (1 / read_term) = permutation_inverses * write_term + // (1 / write_term) = permutation_inverses * read_term + permutation_terms[0] = permutation_relation.template compute_read_term(in, params); + permutation_terms[1] = permutation_relation.template compute_write_term(in, params); + + barretenberg::constexpr_for<0, NUM_TOTAL_TERMS, 1>( + [&]() { denominator_accumulator[i] = permutation_terms[i]; }); + + barretenberg::constexpr_for<0, NUM_TOTAL_TERMS - 1, 1>( + [&]() { denominator_accumulator[i + 1] *= denominator_accumulator[i]; }); + + auto inverse_accumulator = Accumulator(permutation_inverses); // denominator_accumulator[NUM_TOTAL_TERMS - 1]; + + const auto inverse_exists = permutation_relation.template compute_inverse_exists(in); + + // Note: the permutation_inverses are computed so that the value is 0 if !inverse_exists + std::get<0>(accumulator) += + (denominator_accumulator[NUM_TOTAL_TERMS - 1] * permutation_inverses - inverse_exists) * scaling_factor; + + // After this algo, total degree of denominator_accumulator = NUM_TOTAL_TERMS + for (size_t i = 0; i < NUM_TOTAL_TERMS - 1; ++i) { + denominator_accumulator[NUM_TOTAL_TERMS - 1 - i] = + denominator_accumulator[NUM_TOTAL_TERMS - 2 - i] * inverse_accumulator; + inverse_accumulator = inverse_accumulator * permutation_terms[NUM_TOTAL_TERMS - 1 - i]; + } + denominator_accumulator[0] = inverse_accumulator; + + // each predicate is degree-1 + // degree of relation at this point = NUM_TOTAL_TERMS + 1 + std::get<1>(accumulator) += + permutation_relation.template compute_read_term_predicate(in) * denominator_accumulator[0]; + + // each predicate is degree-1 + // degree of relation = NUM_TOTAL_TERMS + 1 + std::get<1>(accumulator) -= + permutation_relation.template compute_write_term_predicate(in) * denominator_accumulator[1]; +} +} // namespace proof_system::honk::logderivative_library \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/arithmetization.hpp b/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/arithmetization.hpp index c6dffdeaa3a..05121c857f4 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/arithmetization.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/arithmetization.hpp @@ -118,7 +118,7 @@ template class Ultra { template class UltraHonk { public: static constexpr size_t NUM_WIRES = 4; - static constexpr size_t NUM_SELECTORS = 12; + static constexpr size_t NUM_SELECTORS = 14; using FF = FF_; using SelectorType = std::vector>; @@ -137,7 +137,9 @@ template class UltraHonk { SelectorType& q_elliptic() { return selectors[8]; }; SelectorType& q_aux() { return selectors[9]; }; SelectorType& q_lookup_type() { return selectors[10]; }; - SelectorType& q_busread() { return this->selectors[11]; }; + SelectorType& q_busread() { return selectors[11]; }; + SelectorType& q_poseidon2_external() { return this->selectors[12]; }; + SelectorType& q_poseidon2_internal() { return this->selectors[13]; }; + const auto& get() const { return selectors; }; @@ -154,7 +156,12 @@ * Ultra arithmetization * */ - void pad_additional() { q_busread().emplace_back(0); }; + void pad_additional() + { + q_busread().emplace_back(0); + q_poseidon2_external().emplace_back(0); + q_poseidon2_internal().emplace_back(0); + }; // Note: Unused.
Needed only for consistency with Ultra arith (which is used by Plonk) inline static const std::vector selector_names = {}; diff --git a/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/gate_data.hpp b/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/gate_data.hpp index 4f8f799c42b..91540d90d72 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/gate_data.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/arithmetization/gate_data.hpp @@ -132,4 +132,20 @@ template struct ecc_dbl_gate_ { uint32_t x3; uint32_t y3; }; + +template struct poseidon2_external_gate_ { + uint32_t a; + uint32_t b; + uint32_t c; + uint32_t d; + uint32_t round_idx; +}; + +template struct poseidon2_internal_gate_ { + uint32_t a; + uint32_t b; + uint32_t c; + uint32_t d; + uint32_t round_idx; +}; } // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp index 4abbd5bc91f..bdcbd4fa4ad 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp @@ -7,7 +7,7 @@ #include "barretenberg/ecc/curves/bn254/fr.hpp" #include "barretenberg/ecc/curves/grumpkin/grumpkin.hpp" #include "barretenberg/flavor/ecc_vm.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/proof_system/op_queue/ecc_op_queue.hpp" #include "barretenberg/relations/relation_parameters.hpp" @@ -505,9 +505,9 @@ template class ECCVMCircuitBuilder { auto polynomials = compute_polynomials(); const size_t num_rows = polynomials.get_polynomial_size(); - proof_system::honk::lookup_library::compute_logderivative_inverse>( - polynomials, params, num_rows); + proof_system::honk::logderivative_library:: + compute_logderivative_inverse>( + polynomials, params, num_rows); honk::permutation_library::compute_permutation_grand_product>( num_rows, polynomials, params); diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp deleted file mode 100644 index a233692ebf7..00000000000 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp +++ /dev/null @@ -1,99 +0,0 @@ - - -// AUTOGENERATED FILE -#pragma once - -#include "barretenberg/common/throw_or_abort.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/proof_system/circuit_builder/circuit_builder_base.hpp" - -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/relations/generated/Fib.hpp" - -using namespace barretenberg; - -namespace proof_system { - -class FibCircuitBuilder { - public: - using Flavor = proof_system::honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Row = Fib_vm::Row; - - // TODO: template - using Polynomial = Flavor::Polynomial; - using AllPolynomials = Flavor::AllPolynomials; - - static constexpr size_t num_fixed_columns = 6; - static constexpr size_t num_polys = 4; - std::vector rows; - - void set_trace(std::vector&& trace) { rows = std::move(trace); } - - AllPolynomials compute_polynomials() - { - const auto 
num_rows = get_circuit_subgroup_size(); - AllPolynomials polys; - - // Allocate mem for each column - for (auto& poly : polys.get_all()) { - poly = Polynomial(num_rows); - } - - for (size_t i = 0; i < rows.size(); i++) { - polys.Fibonacci_LAST[i] = rows[i].Fibonacci_LAST; - polys.Fibonacci_FIRST[i] = rows[i].Fibonacci_FIRST; - polys.Fibonacci_x[i] = rows[i].Fibonacci_x; - polys.Fibonacci_y[i] = rows[i].Fibonacci_y; - } - - polys.Fibonacci_x_shift = Polynomial(polys.Fibonacci_x.shifted()); - polys.Fibonacci_y_shift = Polynomial(polys.Fibonacci_y.shifted()); - - return polys; - } - - [[maybe_unused]] bool check_circuit() - { - auto polys = compute_polynomials(); - const size_t num_rows = polys.get_polynomial_size(); - - const auto evaluate_relation = [&](const std::string& relation_name) { - typename Relation::SumcheckArrayOfValuesOverSubrelations result; - for (auto& r : result) { - r = 0; - } - constexpr size_t NUM_SUBRELATIONS = result.size(); - - for (size_t i = 0; i < num_rows; ++i) { - Relation::accumulate(result, polys.get_row(i), {}, 1); - - bool x = true; - for (size_t j = 0; j < NUM_SUBRELATIONS; ++j) { - if (result[j] != 0) { - throw_or_abort( - format("Relation ", relation_name, ", subrelation index ", j, " failed at row ", i)); - x = false; - } - } - if (!x) { - return false; - } - } - return true; - }; - - return evaluate_relation.template operator()>("Fib"); - } - - [[nodiscard]] size_t get_num_gates() const { return rows.size(); } - - [[nodiscard]] size_t get_circuit_subgroup_size() const - { - const size_t num_rows = get_num_gates(); - const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); - size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 0 : 1)); - return num_rows_pow2; - } -}; -} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.cpp deleted file mode 100644 index d2d86ba4993..00000000000 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.cpp +++ /dev/null @@ -1,62 +0,0 @@ -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/proof_system/arithmetization/arithmetization.hpp" -#include -#include -#include -#include -#include -#include - -#include "./Fib_trace.hpp" - -#include "barretenberg/relations/generated/Fib.hpp" - -using namespace barretenberg; - -namespace proof_system { - -using Row = Fib_vm::Row; - -std::vector FibTraceBuilder::build_trace() -{ - { - std::vector trace; - // Build up the rows - size_t n = 16; - // Build the is_last column - - // Add first row that makes the shifted cols 0 - Row first_row = Row{ .Fibonacci_FIRST = 1 }; - trace.push_back(first_row); - - // The actual first row - Row row = { - .Fibonacci_x = 0, - .Fibonacci_y = 1, - }; - trace.push_back(row); - - for (size_t i = 2; i < n; i++) { - Row prev_row = trace[i - 1]; - - FF x = prev_row.Fibonacci_y; - FF y = prev_row.Fibonacci_x + prev_row.Fibonacci_y; - Row row = { - .Fibonacci_x = x, - .Fibonacci_y = y, - }; - trace.push_back(row); - } - // Build the isLast row - trace[n - 1].Fibonacci_LAST = 1; - - // Build the shifts - for (size_t i = 1; i < n; i++) { - Row& row = trace[i - 1]; - row.Fibonacci_x_shift = trace[(i) % trace.size()].Fibonacci_x; - row.Fibonacci_y_shift = trace[(i) % trace.size()].Fibonacci_y; - } - return trace; - } -} -} // namespace proof_system \ No newline at end of file diff --git 
a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp deleted file mode 100644 index 856400d82b4..00000000000 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp +++ /dev/null @@ -1,22 +0,0 @@ -#pragma once - -#include "barretenberg/common/throw_or_abort.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/proof_system/circuit_builder/circuit_builder_base.hpp" - -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/relations/generated/Fib.hpp" - -using namespace barretenberg; - -namespace proof_system { - -class FibTraceBuilder { - public: - using Flavor = proof_system::honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Row = Fib_vm::Row; - - static std::vector build_trace(); -}; -} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp index 92164873e44..9527e5283d3 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_translator_circuit_builder.hpp @@ -367,6 +367,7 @@ class GoblinTranslatorCircuitBuilder : public CircuitBuilderBase #include #include using namespace barretenberg; +using namespace crypto; namespace proof_system { @@ -25,8 +28,8 @@ template void GoblinUltraCircuitBuilder_::add_gates_to_ensure_ // Most polynomials are handled via the conventional Ultra method UltraCircuitBuilder_>::add_gates_to_ensure_all_polys_are_non_zero(); - // All that remains is to handle databus related polynomials. In what follows we populate the calldata with some - // mock data then constuct a single calldata read gate + // All that remains is to handle databus related and poseidon2 related polynomials. 
In what follows we populate the + // calldata with some mock data then constuct a single calldata read gate // Populate the calldata with some data public_calldata.emplace_back(this->add_variable(FF(5))); @@ -44,7 +47,7 @@ template void GoblinUltraCircuitBuilder_::add_gates_to_ensure_ this->w_l.emplace_back(public_calldata[read_idx]); // populate with value of calldata at read index this->w_r.emplace_back(this->add_variable(FF(read_idx))); // populate with read index as witness calldata_read_counts[read_idx]++; // increment read count at read index - q_busread.emplace_back(1); // read selector on + q_busread().emplace_back(1); // read selector on // populate all other components with zero this->w_o.emplace_back(this->zero_idx); @@ -60,6 +63,52 @@ template void GoblinUltraCircuitBuilder_::add_gates_to_ensure_ this->q_lookup_type.emplace_back(0); this->q_elliptic.emplace_back(0); this->q_aux.emplace_back(0); + this->q_poseidon2_external.emplace_back(0); + this->q_poseidon2_internal.emplace_back(0); + + ++this->num_gates; + + // mock gates that use poseidon selectors, with all zeros as input + this->w_l.emplace_back(this->zero_idx); + this->w_r.emplace_back(this->zero_idx); + this->w_o.emplace_back(this->zero_idx); + this->w_4.emplace_back(this->zero_idx); + this->q_m.emplace_back(0); + this->q_1.emplace_back(0); + this->q_2.emplace_back(0); + this->q_3.emplace_back(0); + this->q_c.emplace_back(0); + this->q_arith.emplace_back(0); + this->q_4.emplace_back(0); + this->q_sort.emplace_back(0); + this->q_lookup_type.emplace_back(0); + this->q_elliptic.emplace_back(0); + this->q_aux.emplace_back(0); + this->q_busread().emplace_back(0); + this->q_poseidon2_external.emplace_back(1); + this->q_poseidon2_internal.emplace_back(1); + + ++this->num_gates; + + // second gate that stores the output of all zeros of the poseidon gates + this->w_l.emplace_back(this->zero_idx); + this->w_r.emplace_back(this->zero_idx); + this->w_o.emplace_back(this->zero_idx); + this->w_4.emplace_back(this->zero_idx); + this->q_m.emplace_back(0); + this->q_1.emplace_back(0); + this->q_2.emplace_back(0); + this->q_3.emplace_back(0); + this->q_c.emplace_back(0); + this->q_arith.emplace_back(0); + this->q_4.emplace_back(0); + this->q_sort.emplace_back(0); + this->q_lookup_type.emplace_back(0); + this->q_elliptic.emplace_back(0); + this->q_aux.emplace_back(0); + this->q_busread().emplace_back(0); + this->q_poseidon2_external.emplace_back(0); + this->q_poseidon2_internal.emplace_back(0); ++this->num_gates; } @@ -197,5 +246,249 @@ template void GoblinUltraCircuitBuilder_::populate_ecc_op_wire num_ecc_op_gates += 2; }; +template +void GoblinUltraCircuitBuilder_::create_poseidon2_external_gate(const poseidon2_external_gate_& in) +{ + this->w_l.emplace_back(in.a); + this->w_r.emplace_back(in.b); + this->w_o.emplace_back(in.c); + this->w_4.emplace_back(in.d); + this->q_m.emplace_back(0); + this->q_1.emplace_back(Poseidon2Bn254ScalarFieldParams::round_constants[in.round_idx][0]); + this->q_2.emplace_back(Poseidon2Bn254ScalarFieldParams::round_constants[in.round_idx][1]); + this->q_3.emplace_back(Poseidon2Bn254ScalarFieldParams::round_constants[in.round_idx][2]); + this->q_c.emplace_back(0); + this->q_arith.emplace_back(0); + this->q_4.emplace_back(Poseidon2Bn254ScalarFieldParams::round_constants[in.round_idx][3]); + this->q_sort.emplace_back(0); + this->q_lookup_type.emplace_back(0); + this->q_elliptic.emplace_back(0); + this->q_aux.emplace_back(0); + this->q_busread().emplace_back(0); + this->q_poseidon2_external.emplace_back(1); + 
this->q_poseidon2_internal.emplace_back(0); + ++this->num_gates; +} + +template +void GoblinUltraCircuitBuilder_::create_poseidon2_internal_gate(const poseidon2_internal_gate_& in) +{ + this->w_l.emplace_back(in.a); + this->w_r.emplace_back(in.b); + this->w_o.emplace_back(in.c); + this->w_4.emplace_back(in.d); + this->q_m.emplace_back(0); + this->q_1.emplace_back(Poseidon2Bn254ScalarFieldParams::round_constants[in.round_idx][0]); + this->q_2.emplace_back(0); + this->q_3.emplace_back(0); + this->q_c.emplace_back(0); + this->q_arith.emplace_back(0); + this->q_4.emplace_back(0); + this->q_sort.emplace_back(0); + this->q_lookup_type.emplace_back(0); + this->q_elliptic.emplace_back(0); + this->q_aux.emplace_back(0); + this->q_busread().emplace_back(0); + this->q_poseidon2_external.emplace_back(0); + this->q_poseidon2_internal.emplace_back(1); + ++this->num_gates; +} + +template +inline FF GoblinUltraCircuitBuilder_::compute_poseidon2_external_identity(FF q_poseidon2_external_value, + FF q_1_value, + FF q_2_value, + FF q_3_value, + FF q_4_value, + FF w_1_value, + FF w_2_value, + FF w_3_value, + FF w_4_value, + FF w_1_shifted_value, + FF w_2_shifted_value, + FF w_3_shifted_value, + FF w_4_shifted_value, + FF alpha_base, + FF alpha) const +{ + // Power of alpha to separate individual sub-relations + // TODO(kesha): This is a repeated computation which can be efficiently optimized + const FF alpha_a = alpha_base; + const FF alpha_b = alpha_a * alpha; + const FF alpha_c = alpha_b * alpha; + const FF alpha_d = alpha_c * alpha; + + FF s1 = w_1_value + q_1_value; + FF s2 = w_2_value + q_2_value; + FF s3 = w_3_value + q_3_value; + FF s4 = w_4_value + q_4_value; + + FF u1 = s1 * s1; + u1 *= u1; + u1 *= s1; + FF u2 = s2 * s2; + u2 *= u2; + u2 *= s2; + FF u3 = s3 * s3; + u3 *= u3; + u3 *= s3; + FF u4 = s4 * s4; + u4 *= u4; + u4 *= s4; + + auto t0 = u1 + u2; + auto t1 = u3 + u4; + auto t2 = u2 + u2; + t2 += t1; + auto t3 = u4 + u4; + t3 += t0; + auto v4 = t1 + t1; + v4 += v4; + v4 += t3; + auto v2 = t0 + t0; + v2 += v2; + v2 += t2; + auto v1 = t3 + v2; + auto v3 = t2 + v4; + + return q_poseidon2_external_value * (alpha_a * (v1 - w_1_shifted_value) + alpha_b * (v2 - w_2_shifted_value) + + alpha_c * (v3 - w_3_shifted_value) + alpha_d * (v4 - w_4_shifted_value)); +} + +template +inline FF GoblinUltraCircuitBuilder_::compute_poseidon2_internal_identity(FF q_poseidon2_internal_value, + FF q_1_value, + FF w_1_value, + FF w_2_value, + FF w_3_value, + FF w_4_value, + FF w_1_shifted_value, + FF w_2_shifted_value, + FF w_3_shifted_value, + FF w_4_shifted_value, + FF alpha_base, + FF alpha) const +{ + // Power of alpha to separate individual sub-relations + // TODO(kesha): This is a repeated computation which can be efficiently optimized + const FF alpha_a = alpha_base; + const FF alpha_b = alpha_a * alpha; + const FF alpha_c = alpha_b * alpha; + const FF alpha_d = alpha_c * alpha; + + auto s1 = w_1_value + q_1_value; + + auto u1 = s1 * s1; + u1 *= u1; + u1 *= s1; + + auto sum = u1 + w_2_value + w_3_value + w_4_value; + auto v1 = u1 * crypto::Poseidon2Bn254ScalarFieldParams::internal_matrix_diagonal[0]; + v1 += sum; + auto v2 = w_2_value * crypto::Poseidon2Bn254ScalarFieldParams::internal_matrix_diagonal[1]; + v2 += sum; + auto v3 = w_3_value * crypto::Poseidon2Bn254ScalarFieldParams::internal_matrix_diagonal[2]; + v3 += sum; + auto v4 = w_4_value * crypto::Poseidon2Bn254ScalarFieldParams::internal_matrix_diagonal[3]; + v4 += sum; + + return q_poseidon2_internal_value * (alpha_a * (v1 - w_1_shifted_value) + alpha_b * 
(v2 - w_2_shifted_value) + + alpha_c * (v3 - w_3_shifted_value) + alpha_d * (v4 - w_4_shifted_value)); +} + +template bool GoblinUltraCircuitBuilder_::check_circuit() +{ + bool result = true; + if (!UltraCircuitBuilder_>::check_circuit()) { + return false; + } + + const FF poseidon2_external_base = FF::random_element(); + const FF poseidon2_internal_base = FF::random_element(); + const FF alpha = FF::random_element(); + + // For each gate + for (size_t i = 0; i < this->num_gates; i++) { + FF q_poseidon2_external_value; + FF q_poseidon2_internal_value; + FF q_1_value; + FF q_2_value; + FF q_3_value; + FF q_4_value; + FF w_1_value; + FF w_2_value; + FF w_3_value; + FF w_4_value; + // Get the values of selectors and wires and update tag products along the way + q_poseidon2_external_value = this->q_poseidon2_external[i]; + q_poseidon2_internal_value = this->q_poseidon2_internal[i]; + q_1_value = this->q_1[i]; + q_2_value = this->q_2[i]; + q_3_value = this->q_3[i]; + q_4_value = this->q_4[i]; + w_1_value = this->get_variable(this->w_l[i]); + w_2_value = this->get_variable(this->w_r[i]); + w_3_value = this->get_variable(this->w_o[i]); + w_4_value = this->get_variable(this->w_4[i]); + FF w_1_shifted_value; + FF w_2_shifted_value; + FF w_3_shifted_value; + FF w_4_shifted_value; + if (i < (this->num_gates - 1)) { + w_1_shifted_value = this->get_variable(this->w_l[i + 1]); + w_2_shifted_value = this->get_variable(this->w_r[i + 1]); + w_3_shifted_value = this->get_variable(this->w_o[i + 1]); + w_4_shifted_value = this->get_variable(this->w_4[i + 1]); + } else { + w_1_shifted_value = FF::zero(); + w_2_shifted_value = FF::zero(); + w_3_shifted_value = FF::zero(); + w_4_shifted_value = FF::zero(); + } + if (!compute_poseidon2_external_identity(q_poseidon2_external_value, + q_1_value, + q_2_value, + q_3_value, + q_4_value, + w_1_value, + w_2_value, + w_3_value, + w_4_value, + w_1_shifted_value, + w_2_shifted_value, + w_3_shifted_value, + w_4_shifted_value, + poseidon2_external_base, + alpha) + .is_zero()) { +#ifndef FUZZING + info("Poseidon2External identity fails at gate ", i); +#endif + result = false; + break; + } + if (!compute_poseidon2_internal_identity(q_poseidon2_internal_value, + q_1_value, + w_1_value, + w_2_value, + w_3_value, + w_4_value, + w_1_shifted_value, + w_2_shifted_value, + w_3_shifted_value, + w_4_shifted_value, + poseidon2_internal_base, + alpha) + .is_zero()) { +#ifndef FUZZING + info("Poseidon2Internal identity fails at gate ", i); +#endif + result = false; + break; + } + } + return result; +} + template class GoblinUltraCircuitBuilder_; } // namespace proof_system \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp index f4532a31895..e037fe2571d 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp @@ -42,7 +42,9 @@ template class GoblinUltraCircuitBuilder_ : public UltraCircuitBui WireVector& ecc_op_wire_3 = std::get<2>(ecc_op_wires); WireVector& ecc_op_wire_4 = std::get<3>(ecc_op_wires); - SelectorVector& q_busread = this->selectors.q_busread(); + SelectorVector& q_busread() { return this->selectors.q_busread(); }; + SelectorVector& q_poseidon2_external = this->selectors.q_poseidon2_external(); + SelectorVector& q_poseidon2_internal = 
this->selectors.q_poseidon2_internal(); // DataBus call/return data arrays std::vector public_calldata; @@ -132,6 +134,39 @@ template class GoblinUltraCircuitBuilder_ : public UltraCircuitBui } public_calldata.emplace_back(witness_index); } + void create_poseidon2_external_gate(const poseidon2_external_gate_& in); + void create_poseidon2_internal_gate(const poseidon2_internal_gate_& in); + + FF compute_poseidon2_external_identity(FF q_poseidon2_external_value, + FF q_1_value, + FF q_2_value, + FF q_3_value, + FF q_4_value, + FF w_1_value, + FF w_2_value, + FF w_3_value, + FF w_4_value, + FF w_1_shifted_value, + FF w_2_shifted_value, + FF w_3_shifted_value, + FF w_4_shifted_value, + FF alpha_base, + FF alpha) const; + + FF compute_poseidon2_internal_identity(FF q_poseidon2_internal_value, + FF q_1_value, + FF w_1_value, + FF w_2_value, + FF w_3_value, + FF w_4_value, + FF w_1_shifted_value, + FF w_2_shifted_value, + FF w_3_shifted_value, + FF w_4_shifted_value, + FF alpha_base, + FF alpha) const; + + bool check_circuit(); }; extern template class GoblinUltraCircuitBuilder_; using GoblinUltraCircuitBuilder = GoblinUltraCircuitBuilder_; diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.test.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.test.cpp index 5d95b57e39a..7a3424626ae 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.test.cpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.test.cpp @@ -8,6 +8,15 @@ auto& engine = numeric::random::get_debug_engine(); } namespace proof_system { +TEST(GoblinUltraCircuitBuilder, BaseCase) +{ + GoblinUltraCircuitBuilder circuit_constructor = GoblinUltraCircuitBuilder(); + fr a = fr::one(); + circuit_constructor.add_public_variable(a); + bool result = circuit_constructor.check_circuit(); + EXPECT_EQ(result, true); +} + /** * @brief Test the queueing of simple ecc ops via the Goblin builder * @details There are two things to check here: 1) When ecc ops are queued by the builder, the corresponding native @@ -15,7 +24,7 @@ namespace proof_system { * encoded in the op_wires, i.e. the operands can be reconstructed as expected. 
* */ -TEST(UltraCircuitBuilder, GoblinSimple) +TEST(GoblinUltraCircuitBuilder, GoblinSimple) { const size_t CHUNK_SIZE = plonk::NUM_LIMB_BITS_IN_FIELD_SIMULATION * 2; @@ -81,7 +90,7 @@ TEST(UltraCircuitBuilder, GoblinSimple) /** * @brief Check that the ultra ops are recorded correctly in the EccOpQueue * */ -TEST(UltraCircuitBuilder, GoblinEccOpQueueUltraOps) +TEST(GoblinUltraCircuitBuilder, GoblinEccOpQueueUltraOps) { // Construct a simple circuit with op gates auto builder = GoblinUltraCircuitBuilder(); diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.hpp new file mode 100644 index 00000000000..659187b4131 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.hpp @@ -0,0 +1,163 @@ +/** + * @file toy_avm_circuit_builder.hpp + * @author Rumata888 + * @brief A circuit builder for the AVM toy version used to showcase permutation and lookup mechanisms for PIL + * + */ +#pragma once + +#include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/ecc/curves/bn254/fr.hpp" +#include "barretenberg/flavor/toy_avm.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" +#include "barretenberg/relations/relation_parameters.hpp" +#include "barretenberg/relations/toy_avm/generic_permutation_relation.hpp" + +namespace proof_system { + +/** + * @brief Circuit builder for the ToyAVM that is used to explain generic permutation settings + * + * @tparam Flavor + */ +template class ToyAVMCircuitBuilder { + public: + using FF = typename Flavor::FF; + using Polynomial = typename Flavor::Polynomial; + + static constexpr size_t NUM_POLYNOMIALS = Flavor::NUM_ALL_ENTITIES; + static constexpr size_t NUM_WIRES = Flavor::NUM_WIRES; + + using AllPolynomials = typename Flavor::AllPolynomials; + size_t num_gates = 0; + std::array, NUM_WIRES> wires; + ToyAVMCircuitBuilder() = default; + + void add_row(const std::array row) + { + for (size_t i = 0; i < NUM_WIRES; i++) { + wires[i].emplace_back(row[i]); + } + num_gates = wires[0].size(); + } + + /** + * @brief Compute the AVM Template flavor polynomial data required to generate a proof + * + * @return AllPolynomials + */ + AllPolynomials compute_polynomials() + { + + const auto num_gates_log2 = static_cast(numeric::get_msb64(num_gates)); + size_t num_gates_pow2 = 1UL << (num_gates_log2 + (1UL << num_gates_log2 == num_gates ?
0 : 1)); + + AllPolynomials polys; + for (auto& poly : polys.get_all()) { + poly = Polynomial(num_gates_pow2); + } + + polys.lagrange_first[0] = 1; + + for (size_t i = 0; i < num_gates; ++i) { + // Fill out the witness polynomials + polys.permutation_set_column_1[i] = wires[0][i]; + polys.permutation_set_column_2[i] = wires[1][i]; + polys.permutation_set_column_3[i] = wires[2][i]; + polys.permutation_set_column_4[i] = wires[3][i]; + polys.self_permutation_column[i] = wires[4][i]; + // By default the permutation is over all rows where we place data + polys.enable_tuple_set_permutation[i] = 1; + // The same column permutation alternates between even and odd values + polys.enable_single_column_permutation[i] = 1; + polys.enable_first_set_permutation[i] = i & 1; + polys.enable_second_set_permutation[i] = 1 - (i & 1); + } + return polys; + } + + /** + * @brief Check that the circuit is correct (proof should work) + * + */ + bool check_circuit() + { + // using FirstPermutationRelation = typename std::tuple_element_t<0, Flavor::Relations>; + // For now only gamma and beta are used + const FF gamma = FF::random_element(); + const FF beta = FF::random_element(); + proof_system::RelationParameters params{ + .eta = 0, + .beta = beta, + .gamma = gamma, + .public_input_delta = 0, + .lookup_grand_product_delta = 0, + .beta_sqr = 0, + .beta_cube = 0, + .eccvm_set_permutation_delta = 0, + }; + + // Compute polynomial values + auto polynomials = compute_polynomials(); + const size_t num_rows = polynomials.get_polynomial_size(); + + // Check the tuple permutation relation + proof_system::honk::logderivative_library::compute_logderivative_inverse< + Flavor, + honk::sumcheck::GenericPermutationRelation>( + polynomials, params, num_rows); + + using PermutationRelation = + honk::sumcheck::GenericPermutationRelation; + typename honk::sumcheck::GenericPermutationRelation::SumcheckArrayOfValuesOverSubrelations + permutation_result; + for (auto& r : permutation_result) { + r = 0; + } + for (size_t i = 0; i < num_rows; ++i) { + PermutationRelation::accumulate(permutation_result, polynomials.get_row(i), params, 1); + } + for (auto r : permutation_result) { + if (r != 0) { + info("Tuple GenericPermutationRelation failed."); + return false; + } + } + // Check the single permutation relation + proof_system::honk::logderivative_library::compute_logderivative_inverse< + Flavor, + honk::sumcheck::GenericPermutationRelation>( + polynomials, params, num_rows); + + using SameWirePermutationRelation = + honk::sumcheck::GenericPermutationRelation; + typename honk::sumcheck::GenericPermutationRelation::SumcheckArrayOfValuesOverSubrelations + second_permutation_result; + for (auto& r : second_permutation_result) { + r = 0; + } + for (size_t i = 0; i < num_rows; ++i) { + SameWirePermutationRelation::accumulate(second_permutation_result, polynomials.get_row(i), params, 1); + } + for (auto r : second_permutation_result) { + if (r != 0) { + info("Same wire GenericPermutationRelation failed."); + return false; + } + } + return true; + } + + [[nodiscard]] size_t get_num_gates() const { return num_gates; } + + [[nodiscard]] size_t get_circuit_subgroup_size(const size_t num_rows) const + { + + const auto num_rows_log2 = static_cast(numeric::get_msb64(num_rows)); + size_t num_rows_pow2 = 1UL << (num_rows_log2 + (1UL << num_rows_log2 == num_rows ? 
0 : 1)); + return num_rows_pow2; + } +}; +} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.test.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.test.cpp new file mode 100644 index 00000000000..62b2e4d83c3 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/toy_avm/toy_avm_circuit_builder.test.cpp @@ -0,0 +1,70 @@ +#include "toy_avm_circuit_builder.hpp" +#include "barretenberg/crypto/generators/generator_data.hpp" +#include + +using namespace barretenberg; + +namespace { +auto& engine = numeric::random::get_debug_engine(); +} + +namespace toy_avm_circuit_builder_tests { + +/** + * @brief A test explaining the work of the permutations in Toy AVM + * + */ +TEST(ToyAVMCircuitBuilder, BaseCase) +{ + + using FF = proof_system::honk::flavor::ToyAVM::FF; + const size_t circuit_size = 16; + proof_system::ToyAVMCircuitBuilder circuit_builder; + + // Sample 2*16 random elements for the tuple permutation example + std::vector column_0; + std::vector column_1; + for (size_t i = 0; i < circuit_size; i++) { + column_0.emplace_back(FF::random_element()); + column_1.emplace_back(FF::random_element()); + } + + // Sample 8 random elements for the single column permutation + std::vector column_2; + for (size_t i = 0; i < circuit_size / 2; i++) { + column_2.emplace_back(FF::random_element()); + } + + for (size_t i = 0; i < circuit_size; i++) { + // We put the same tuple of values in the first 2 wires and in the next 2, but at different rows + // We also put the same value in the self_permutation column in 2 consecutive rows + circuit_builder.add_row({ column_0[i], column_1[i], column_0[15 - i], column_1[15 - i], column_2[i / 2] }); + } + + // Test that permutations with correct values work + bool result = circuit_builder.check_circuit(); + EXPECT_EQ(result, true); + + // Store value temporarily + FF tmp = circuit_builder.wires[0][5]; + + // Replace one of the values in a tuple permutation column with a random one, breaking the permutation + circuit_builder.wires[0][5] = FF::random_element(); + + // Check that it fails + result = circuit_builder.check_circuit(); + EXPECT_EQ(result, false); + + // Restore value + circuit_builder.wires[0][5] = tmp; + + // Check circuit passes + result = circuit_builder.check_circuit(); + EXPECT_EQ(result, true); + + // Break single-column permutation + circuit_builder.wires[circuit_builder.wires.size() - 1][0] = FF::random_element(); + result = circuit_builder.check_circuit(); + EXPECT_EQ(result, false); +} +} // namespace toy_avm_circuit_builder_tests \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.cpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.cpp index 42ef656b807..39443c58c7f 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.cpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.cpp @@ -80,6 +80,7 @@ void UltraCircuitBuilder_::add_gates_to_ensure_all_polys_are_no q_elliptic.emplace_back(1); q_aux.emplace_back(1); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; // Some relations depend on wire shifts so we add another gate with @@ -140,6 +141,7 @@ void UltraCircuitBuilder_::create_add_gate(const add_triple_num_gates; } @@ -172,6 +174,7 @@ void
UltraCircuitBuilder_::create_big_add_gate(const add_quad_< q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -266,6 +269,7 @@ void UltraCircuitBuilder_::create_big_mul_gate(const mul_quad_< q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -292,6 +296,7 @@ void UltraCircuitBuilder_::create_balanced_add_gate(const add_q q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; // Why 3? TODO: return to this // The purpose of this gate is to enable lazy 32-bit addition. @@ -334,6 +339,7 @@ void UltraCircuitBuilder_::create_mul_gate(const mul_triple_num_gates; } /** @@ -363,6 +369,7 @@ void UltraCircuitBuilder_::create_bool_gate(const uint32_t vari q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -394,6 +401,7 @@ void UltraCircuitBuilder_::create_poly_gate(const poly_triple_< q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -448,6 +456,7 @@ void UltraCircuitBuilder_::create_ecc_add_gate(const ecc_add_ga q_elliptic.emplace_back(1); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } w_l.emplace_back(in.x2); @@ -466,6 +475,7 @@ void UltraCircuitBuilder_::create_ecc_add_gate(const ecc_add_ga q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -512,6 +522,7 @@ void UltraCircuitBuilder_::create_ecc_dbl_gate(const ecc_dbl_ga q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -531,6 +542,7 @@ void UltraCircuitBuilder_::create_ecc_dbl_gate(const ecc_dbl_ga q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -561,6 +573,7 @@ void UltraCircuitBuilder_::fix_witness(const uint32_t witness_i q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } @@ -636,6 +649,7 @@ plookup::ReadData UltraCircuitBuilder_::create_gates_ q_elliptic.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); ++this->num_gates; } return read_data; @@ -945,6 +959,7 @@ void UltraCircuitBuilder_::create_sort_constraint(const std::ve q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); } // dummy gate needed because of sort widget's check of next row w_l.emplace_back(variable_index[variable_index.size() - 1]); @@ -964,6 +979,7 @@ void UltraCircuitBuilder_::create_sort_constraint(const std::ve q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); } // useful to put variables in the witness that aren't already used - e.g.
the dummy variables of the range constraint in @@ -998,6 +1014,7 @@ void UltraCircuitBuilder_::create_dummy_constraints(const std:: q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); } } @@ -1029,6 +1046,7 @@ void UltraCircuitBuilder_::create_sort_constraint_with_edges( q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); // enforce range check for middle rows for (size_t i = gate_width; i < variable_index.size() - gate_width; i += gate_width) { @@ -1049,6 +1067,7 @@ void UltraCircuitBuilder_::create_sort_constraint_with_edges( q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); } // enforce range checks of last row and ending at end if (variable_index.size() > gate_width) { @@ -1069,6 +1088,7 @@ void UltraCircuitBuilder_::create_sort_constraint_with_edges( q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); } // dummy gate needed because of sort widget's check of next row @@ -1090,6 +1110,7 @@ void UltraCircuitBuilder_::create_sort_constraint_with_edges( q_lookup_type.emplace_back(0); q_aux.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); } // range constrain a value by decomposing it into limbs whose size should be the default range constraint size @@ -1206,6 +1227,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::LIMB_ACCUMULATE_2: { @@ -1217,6 +1239,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::NON_NATIVE_FIELD_1: { @@ -1228,6 +1251,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::NON_NATIVE_FIELD_2: { @@ -1239,6 +1263,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::NON_NATIVE_FIELD_3: { @@ -1250,6 +1275,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::ROM_CONSISTENCY_CHECK: { @@ -1265,6 +1291,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::RAM_CONSISTENCY_CHECK: { @@ -1281,6 +1308,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(1); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::RAM_TIMESTAMP_CHECK: { @@ -1294,6 +1322,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::ROM_READ: { @@ -1308,6 +1337,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const
AUX_SELECT q_c.emplace_back(0); // read/write flag stored in q_c q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::RAM_READ: { @@ -1322,6 +1352,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); // read/write flag stored in q_c q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } case AUX_SELECTORS::RAM_WRITE: { @@ -1336,6 +1367,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(1); // read/write flag stored in q_c q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } default: { @@ -1347,6 +1379,7 @@ void UltraCircuitBuilder_::apply_aux_selectors(const AUX_SELECT q_c.emplace_back(0); q_arith.emplace_back(0); selectors.pad_additional(); + check_selector_length_consistency(); break; } } @@ -1869,6 +1902,7 @@ std::array UltraCircuitBuilder_::evaluate_non_nati q_aux.emplace_back(0); selectors.pad_additional(); } + check_selector_length_consistency(); this->num_gates += 4; return std::array{ @@ -1991,6 +2025,7 @@ std::array UltraCircuitBuilder_::evaluate_non_nati q_aux.emplace_back(0); selectors.pad_additional(); } + check_selector_length_consistency(); this->num_gates += 4; return std::array{ @@ -3363,7 +3398,7 @@ template bool UltraCircuitBuilder_:: alpha) .is_zero()) { #ifndef FUZZING - info("Arithemtic identity fails at gate ", i); + info("Arithmetic identity fails at gate ", i); #endif result = false; break; diff --git a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp index a8186c477dc..9767781e72d 100644 --- a/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp +++ b/barretenberg/cpp/src/barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp @@ -678,6 +678,25 @@ class UltraCircuitBuilder_ : public CircuitBuilderBase size imbalance between sorted and non-sorted sets. Checking for this + * gates. No arithmetic gate => size imbalance between sorted and non-sorted sets. Checking for this * and throwing an error would require a refactor of the Composer to catalog all 'orphan' variables not * assigned to gates. * diff --git a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp index bc8a95ae2ee..5e2b7b32ec6 100644 --- a/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/protogalaxy/protogalaxy_prover.hpp @@ -256,13 +256,13 @@ template class ProtoGalaxyProver_ { num_threads = num_threads > 0 ?
num_threads : 1; // ensure num threads is >= 1 size_t iterations_per_thread = common_circuit_size / num_threads; // actual iterations per thread - // Constuct univariate accumulator containers; one per thread + // Construct univariate accumulator containers; one per thread std::vector thread_univariate_accumulators(num_threads); for (auto& accum : thread_univariate_accumulators) { Utils::zero_univariates(accum); } - // Constuct extended univariates containers; one per thread + // Construct extended univariates containers; one per thread std::vector extended_univariates; extended_univariates.resize(num_threads); diff --git a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp index da659a321e9..5508a72c292 100644 --- a/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/databus_lookup_relation.hpp @@ -3,7 +3,7 @@ #include #include "barretenberg/common/constexpr_utils.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/polynomials/polynomial.hpp" #include "barretenberg/polynomials/univariate.hpp" #include "barretenberg/relations/relation_types.hpp" @@ -37,11 +37,19 @@ template class DatabusLookupRelationImpl { * @return true * @return false */ - template static bool lookup_exists_at_row(const AllValues& row) + template static bool operation_exists_at_row(const AllValues& row) { return (row.q_busread == 1 || row.calldata_read_counts > 0); } + /** + * @brief Get the lookup inverse polynomial + * + * @tparam AllEntities + * @param in + * @return auto& + */ + template static auto& get_inverse_polynomial(AllEntities& in) { return in.lookup_inverses; } /** * @brief Compute the Accumulator whose values indicate whether the inverse is computed or not * @details This is needed for efficiency since we don't need to compute the inverse unless the log derivative @@ -154,7 +162,7 @@ template class DatabusLookupRelationImpl { /** * @brief Accumulate the contribution from two subrelations for the log derivative databus lookup argument - * @details See lookup_library.hpp for details of the generic log-derivative lookup argument + * @details See logderivative_library.hpp for details of the generic log-derivative lookup argument * * @param accumulator transformed to `evals + C(in(X)...)*scaling_factor` * @param in an std::array containing the fully extended Accumulator edges.
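 *
 * (Editor's illustrative sketch, not part of the original diff.) The identity the log-derivative argument checks, in its general lookup form: for reads f_j into a table t_i with read counts m_i and a random challenge gamma, sum_j 1/(gamma + f_j) == sum_i m_i / (gamma + t_i). A self-contained toy version over the prime field F_101 (demonstration only):
 *
 *     #include <cstdint>
 *     #include <vector>
 *
 *     struct Fp {
 *         static constexpr uint64_t P = 101; // toy modulus; the real code works over the bn254 scalar field
 *         uint64_t v;
 *         Fp(uint64_t x = 0) : v(x % P) {}
 *         Fp operator+(Fp o) const { return { v + o.v }; }
 *         Fp operator*(Fp o) const { return { v * o.v }; }
 *         Fp inverse() const // Fermat's little theorem: a^(P-2) == a^(-1)
 *         {
 *             Fp r{ 1 }, b{ v };
 *             for (uint64_t e = P - 2; e != 0; e >>= 1, b = b * b) {
 *                 if ((e & 1) != 0) { r = r * b; }
 *             }
 *             return r;
 *         }
 *     };
 *
 *     bool logderivative_lookup_holds(const std::vector<uint64_t>& lookups, // values read (f_j)
 *                                     const std::vector<uint64_t>& table,   // table values (t_i)
 *                                     const std::vector<uint64_t>& counts,  // read counts (m_i)
 *                                     Fp gamma)                             // random challenge
 *     {
 *         Fp lhs{ 0 }, rhs{ 0 };
 *         for (auto f : lookups) { lhs = lhs + (gamma + Fp{ f }).inverse(); }
 *         for (std::size_t i = 0; i < table.size(); ++i) {
 *             rhs = rhs + Fp{ counts[i] } * (gamma + Fp{ table[i] }).inverse();
 *         }
 *         return lhs.v == rhs.v;
 *     }
 *
 * e.g. lookups {3, 3, 7} into table {3, 7, 9} with counts {2, 1, 0} pass for any gamma avoiding the poles; a permutation is the special case where every element is read exactly once.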
@@ -167,9 +175,9 @@ template class DatabusLookupRelationImpl { const Parameters& params, const FF& scaling_factor) { - honk::lookup_library::accumulate_logderivative_lookup_subrelation_contributions>( - accumulator, in, params, scaling_factor); + honk::logderivative_library:: + accumulate_logderivative_lookup_subrelation_contributions>( + accumulator, in, params, scaling_factor); } }; diff --git a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp index 1daf3469bc7..e52e297cfa4 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp +++ b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.cpp @@ -1,6 +1,6 @@ #include "barretenberg/flavor/ecc_vm.hpp" #include "barretenberg/flavor/relation_definitions_fwd.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "ecc_msm_relation.hpp" namespace proof_system::honk::sumcheck { @@ -25,7 +25,7 @@ void ECCVMLookupRelationImpl::accumulate(ContainerOverSubrelations& accumula const Parameters& params, const FF& scaling_factor) { - lookup_library::accumulate_logderivative_lookup_subrelation_contributions>( + logderivative_library::accumulate_logderivative_lookup_subrelation_contributions>( accumulator, in, params, scaling_factor); } diff --git a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp index 35af59f7490..aa3afffc87f 100644 --- a/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/ecc_vm/ecc_lookup_relation.hpp @@ -24,12 +24,21 @@ template class ECCVMLookupRelationImpl { static constexpr std::array SUBRELATION_LINEARLY_INDEPENDENT = { true, false }; - template static bool lookup_exists_at_row(const AllValues& row) + template static bool operation_exists_at_row(const AllValues& row) { return (row.msm_add == 1) || (row.msm_skew == 1) || (row.precompute_select == 1); } + /** + * @brief Get the inverse lookup polynomial + * + * @tparam AllEntities + * @param in + * @return auto& + */ + template static auto& get_inverse_polynomial(AllEntities& in) { return in.lookup_inverses; } + template static Accumulator compute_inverse_exists(const AllEntities& in) { diff --git a/barretenberg/cpp/src/barretenberg/relations/generated/Fib.hpp b/barretenberg/cpp/src/barretenberg/relations/generated/Fib.hpp deleted file mode 100644 index 428c6a1208b..00000000000 --- a/barretenberg/cpp/src/barretenberg/relations/generated/Fib.hpp +++ /dev/null @@ -1,64 +0,0 @@ - -#pragma once -#include "../relation_parameters.hpp" -#include "../relation_types.hpp" - -namespace proof_system::Fib_vm { - -template struct Row { - FF Fibonacci_LAST{}; - FF Fibonacci_FIRST{}; - FF Fibonacci_x{}; - FF Fibonacci_y{}; - FF Fibonacci_x_shift{}; - FF Fibonacci_y_shift{}; -}; - -#define DECLARE_VIEWS(index) \ - using View = typename std::tuple_element::type; \ - [[maybe_unused]] auto Fibonacci_LAST = View(new_term.Fibonacci_LAST); \ - [[maybe_unused]] auto Fibonacci_FIRST = View(new_term.Fibonacci_FIRST); \ - [[maybe_unused]] auto Fibonacci_x = View(new_term.Fibonacci_x); \ - [[maybe_unused]] auto Fibonacci_y = View(new_term.Fibonacci_y); \ - [[maybe_unused]] auto Fibonacci_x_shift = View(new_term.Fibonacci_x_shift); \ - [[maybe_unused]] auto Fibonacci_y_shift = 
View(new_term.Fibonacci_y_shift); - -template class FibImpl { - public: - using FF = FF_; - - static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ - 4, - 4, - }; - - template - void static accumulate(ContainerOverSubrelations& evals, - const AllEntities& new_term, - [[maybe_unused]] const RelationParameters&, - [[maybe_unused]] const FF& scaling_factor) - { - - // Contribution 0 - { - DECLARE_VIEWS(0); - - auto tmp = (((-Fibonacci_FIRST + FF(1)) * (-Fibonacci_LAST + FF(1))) * (Fibonacci_x_shift - Fibonacci_y)); - tmp *= scaling_factor; - std::get<0>(evals) += tmp; - } - // Contribution 1 - { - DECLARE_VIEWS(1); - - auto tmp = (((-Fibonacci_FIRST + FF(1)) * (-Fibonacci_LAST + FF(1))) * - (Fibonacci_y_shift - (Fibonacci_x + Fibonacci_y))); - tmp *= scaling_factor; - std::get<1>(evals) += tmp; - } - } -}; - -template using Fib = Relation>; - -} // namespace proof_system::Fib_vm \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp index aba162e56af..dc150d545ea 100644 --- a/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/poseidon2_external_relation.hpp @@ -114,4 +114,4 @@ template class Poseidon2ExternalRelationImpl { }; template using Poseidon2ExternalRelation = Relation>; -} // namespace proof_system \ No newline at end of file +} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp index 1ec20c0956b..e67ee8519d9 100644 --- a/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp +++ b/barretenberg/cpp/src/barretenberg/relations/poseidon2_internal_relation.hpp @@ -94,4 +94,4 @@ template class Poseidon2InternalRelationImpl { }; // namespace proof_system template using Poseidon2InternalRelation = Relation>; -} // namespace proof_system \ No newline at end of file +} // namespace proof_system diff --git a/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.cpp b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.cpp new file mode 100644 index 00000000000..1822c388c4e --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.cpp @@ -0,0 +1,34 @@ +#include "generic_permutation_relation.hpp" +#include "barretenberg/flavor/relation_definitions_fwd.hpp" +#include "barretenberg/flavor/toy_avm.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" +#include "relation_definer.hpp" + +namespace proof_system::honk::sumcheck { + +/** + * @brief Expression for generic log-derivative-based set permutation. + * @param accumulator transformed to `evals + C(in(X)...)*scaling_factor` + * @param in an std::array containing the fully extended Accumulator edges. + * @param relation_params contains beta, gamma, and public_input_delta, .... + * @param scaling_factor optional term to scale the evaluation before adding to evals. 
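+ *
+ * (Editorial note, a simplified sketch; see logderivative_library.hpp for the authoritative form.) Writing r_i = gamma + fingerprint(first-set tuple at row i) and w_i = gamma + fingerprint(second-set tuple at row i), the two subrelations together enforce sum_i enable_first_i / r_i == sum_i enable_second_i / w_i, with the committed inverse polynomial supplying 1/(r_i * w_i) so that every division is replaced by a bounded-degree multiplication that sumcheck can handle.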
+ */ +template +template +void GenericPermutationRelationImpl::accumulate(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor) +{ + logderivative_library::accumulate_logderivative_permutation_subrelation_contributions< + FF, + GenericPermutationRelationImpl>(accumulator, in, params, scaling_factor); +} + +// template class GenericPermutationRelationImpl; +// template +// using GenericPermutationRelationExampleSettingsImpl = GenericPermutationRelationImpl; DEFINE_SUMCHECK_RELATION_CLASS(GenericPermutationRelationExampleSettingsImpl, flavor::AVMTemplate); + +DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(GenericPermutationRelationImpl, flavor::ToyAVM); +} // namespace proof_system::honk::sumcheck diff --git a/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.hpp b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.hpp new file mode 100644 index 00000000000..d4246a423f5 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/toy_avm/generic_permutation_relation.hpp @@ -0,0 +1,210 @@ +/** + * @file generic_permutation_relation.hpp + * @author Rumata888 + * @brief This file contains the template for the generic permutation that can be specialized to enforce various + * permutations (for explanation on how to define them, see "relation_definer.hpp") + * + */ +#pragma once +#include +#include + +#include "barretenberg/common/constexpr_utils.hpp" +#include "barretenberg/polynomials/polynomial.hpp" +#include "barretenberg/polynomials/univariate.hpp" +#include "barretenberg/relations/relation_types.hpp" + +namespace proof_system::honk::sumcheck { +/** + * @brief Specifies positions of elements in the tuple of entities received from methods in the Settings class + * + */ +enum GenericPermutationSettingIndices { + INVERSE_POLYNOMIAL_INDEX, /* The index of the inverse polynomial*/ + ENABLE_INVERSE_CORRECTNESS_CHECK_POLYNOMIAL_INDEX, /* The index of the polynomial enabling first subrelation*/ + FIRST_PERMUTATION_SET_ENABLE_POLYNOMIAL_INDEX, /* The index of the polynomial that adds an element from the first + set to the sum*/ + SECOND_PERMUTATION_SET_ENABLE_POLYNOMIAL_INDEX, /* The index of the polynomial that adds an element from the second + set to the sum*/ + + PERMUTATION_SETS_START_POLYNOMIAL_INDEX, /* The starting index of the polynomials that are used in the permutation + sets*/ +}; + +template class GenericPermutationRelationImpl { + public: + using FF = FF_; + // Read and write term counts should stay set to 1 unless we want to permute several columns at once as accumulated + // sets (not as tuples). + static constexpr size_t READ_TERMS = 1; + static constexpr size_t WRITE_TERMS = 1; + // 1 + polynomial degree of this relation + static constexpr size_t LENGTH = READ_TERMS + WRITE_TERMS + 3; // 5 + + static constexpr std::array SUBRELATION_PARTIAL_LENGTHS{ + LENGTH, // inverse polynomial correctness sub-relation + LENGTH // log-derived terms subrelation + }; + + /** + * @brief We apply the power polynomial only to the first subrelation + * + *@details The first subrelation establishes correspondence between the inverse polynomial elements and the terms.
+ *The second relation computes the inverses of individual terms, which are then summed up with sumcheck + * + */ + static constexpr std::array SUBRELATION_LINEARLY_INDEPENDENT = { true, false }; + + /** + * @brief Check if we need to compute the inverse polynomial element value for this row + * @details This proxies to a method in the Settings class + * + * @param row All values at row + */ + template static bool operation_exists_at_row(const AllValues& row) + + { + return Settings::inverse_polynomial_is_computed_at_row(row); + } + + /** + * @brief Get the inverse permutation polynomial (needed to compute its value) + * + */ + template static auto& get_inverse_polynomial(AllEntities& in) + { + // WIRE containing the inverse of the product of terms at this row. Used to reconstruct individual inverted + // terms + return std::get(Settings::get_nonconst_entities(in)); + } + + /** + * @brief Get selector/wire switching on(1) or off(0) inverse computation + * + */ + template + static Accumulator compute_inverse_exists(const AllEntities& in) + { + using View = typename Accumulator::View; + + // WIRE/SELECTOR enabling the permutation used in the sumcheck computation. This affects the first subrelation + return Accumulator( + View(std::get(Settings::get_const_entities(in)))); + } + + /** + * @brief Compute if the value from the first set exists in this row + * + * @tparam read_index Kept for compatibility with lookups, behavior doesn't change + */ + template + static Accumulator compute_read_term_predicate(const AllEntities& in) + + { + static_assert(read_index < READ_TERMS); + using View = typename Accumulator::View; + + // The selector/wire value that determines that an element from the first set needs to be included. Can be + // different from the wire used in the write part. + return Accumulator( + View(std::get(Settings::get_const_entities(in)))); + } + + /** + * @brief Compute if the value from the second set exists in this row + * + * @tparam write_index Kept for compatibility with lookups, behavior doesn't change + */ + template + static Accumulator compute_write_term_predicate(const AllEntities& in) + { + static_assert(write_index < WRITE_TERMS); + using View = typename Accumulator::View; + + // The selector/wire value that determines that an element from the second set needs to be included. Can be + // different from the wire used in the read part.
+ return Accumulator( + View(std::get(Settings::get_const_entities(in)))); + } + + /** + * @brief Compute the value of a single item in the set + * + * @details Computes the polynomial \gamma + \sum_{i=0}^{num_columns}(column_i*\beta^i), so the tuple of columns is + * in the first set + * + * @tparam read_index Kept for compatibility with lookups, behavior doesn't change + * + * @param params Used for beta and gamma + */ + template + static Accumulator compute_read_term(const AllEntities& in, const Parameters& params) + { + using View = typename Accumulator::View; + + static_assert(read_index < READ_TERMS); + + // Retrieve all polynomials used + const auto all_polynomials = Settings::get_const_entities(in); + + auto result = Accumulator(0); + + // Iterate over tuple and sum as a polynomial over beta + barretenberg::constexpr_for( + [&]() { result = result * params.beta + View(std::get(all_polynomials)); }); + + const auto& gamma = params.gamma; + return result + gamma; + } + + /** + * @brief Compute the value of a single item in the set + * + * @details Computes the polynomial \gamma + \sum_{i=0}^{num_columns}(column_i*\beta^i), so the tuple of columns is + * in the second set + * + * @tparam write_index Kept for compatibility with lookups, behavior doesn't change + * + * @param params Used for beta and gamma + */ + template + static Accumulator compute_write_term(const AllEntities& in, const Parameters& params) + { + using View = typename Accumulator::View; + + static_assert(write_index < WRITE_TERMS); + + // Get all used entities + const auto& used_entities = Settings::get_const_entities(in); + + auto result = Accumulator(0); + // Iterate over tuple and sum as a polynomial over beta + barretenberg::constexpr_for( + [&]() { result = result * params.beta + View(std::get(used_entities)); }); + + const auto& gamma = params.gamma; + return result + gamma; + } + + /** + * @brief Expression for generic log-derivative-based set permutation. + * @param accumulator transformed to `evals + C(in(X)...)*scaling_factor` + * @param in an std::array containing the fully extended Accumulator edges. + * @param relation_params contains beta, gamma, and public_input_delta, .... + * @param scaling_factor optional term to scale the evaluation before adding to evals. 
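+ *
+ * (Editorial sketch, not part of the original diff.) compute_read_term/compute_write_term above fold a tuple of column values into one field element by Horner's rule, i.e. they evaluate gamma + column_0 * beta^(COLUMNS_PER_SET - 1) + ... + column_{COLUMNS_PER_SET - 1}. In plain C++:
+ *
+ *     template <typename FF, std::size_t N>
+ *     FF fingerprint_tuple(const std::array<FF, N>& columns, const FF& beta, const FF& gamma)
+ *     {
+ *         FF result{ 0 };
+ *         for (const FF& column : columns) {
+ *             result = result * beta + column; // one Horner step per column, mirroring the constexpr_for
+ *         }
+ *         return result + gamma;
+ *     }
+ *
+ * Two multisets of tuples are permutations of each other iff their fingerprint multisets match, with overwhelming probability over random beta and gamma.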
+ */ + template + static void accumulate(ContainerOverSubrelations& accumulator, + const AllEntities& in, + const Parameters& params, + const FF& scaling_factor); +}; + +template +using GenericPermutationRelation = Relation>; + +} // namespace proof_system::honk::sumcheck diff --git a/barretenberg/cpp/src/barretenberg/relations/toy_avm/relation_definer.hpp b/barretenberg/cpp/src/barretenberg/relations/toy_avm/relation_definer.hpp new file mode 100644 index 00000000000..4771c1260b7 --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/relations/toy_avm/relation_definer.hpp @@ -0,0 +1,213 @@ +/** + * @file relation_definer.hpp + * @author Rumata888 + * @brief This file contains settings for the General Permutation Relation implementations and (in the future) Lookup + * implementations + * + */ +#pragma once +#include +#include +namespace proof_system::honk::sumcheck { + +/** + * @brief This class contains an example of how to set PermutationSettings classes used by the + * GenericPermutationRelationImpl class to specify a concrete permutation + * + * @details To create your own permutation: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your permutation + * 3) Update "DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to include the new + * settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class ExampleTuplePermutationSettings { + public: + // This constant defines how many columns are bundled together to form each set. For example, in this case we are + // bundling tuples of (permutation_set_column_1, permutation_set_column_2) to be a permutation of + // (permutation_set_column_3,permutation_set_column_4). As the tuple has 2 elements, set the value to 2 + constexpr static size_t COLUMNS_PER_SET = 2; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial needs to be computed at this + * index. Otherwise the value needs to be set to zero. + * + * @details If this is true then permutation takes place in this row + * + */ + template static inline bool inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.enable_tuple_set_permutation == 1); + } + + /** + * @brief Get all the entities for the permutation when we don't need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!)
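+ *
+ * (Editor's illustrative sketch, all names invented.) A hypothetical settings class following steps 1)-4) above, permuting a single column my_column_a into my_column_b, gated by enable_my_permutation:
+ *
+ *     class MyColumnPermutationSettings {
+ *       public:
+ *         constexpr static size_t COLUMNS_PER_SET = 1;
+ *
+ *         template <typename AllEntities>
+ *         static inline bool inverse_polynomial_is_computed_at_row(const AllEntities& in)
+ *         {
+ *             return (in.enable_my_permutation == 1);
+ *         }
+ *
+ *         template <typename AllEntities> static inline auto get_const_entities(const AllEntities& in)
+ *         {
+ *             return std::forward_as_tuple(in.my_permutation_inverses, // inverse products
+ *                                          in.enable_my_permutation,   // switches on inverse correctness
+ *                                          in.enable_my_permutation,   // adds first-set terms to the sum
+ *                                          in.enable_my_permutation,   // adds second-set terms to the sum
+ *                                          in.my_column_a,             // the first set
+ *                                          in.my_column_b);            // the second set
+ *         }
+ *
+ *         template <typename AllEntities> static inline auto get_nonconst_entities(AllEntities& in)
+ *         {
+ *             return std::forward_as_tuple(in.my_permutation_inverses, in.enable_my_permutation,
+ *                                          in.enable_my_permutation, in.enable_my_permutation,
+ *                                          in.my_column_a, in.my_column_b);
+ *         }
+ *     };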
+ * + * @return All the entities needed for the permutation + */ + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple( + in.tuple_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_tuple_set_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_tuple_set_permutation, /* Enables adding first set to the sum */ + in.enable_tuple_set_permutation, /* Enables adding second set to the sum */ + in.permutation_set_column_3, /* The first entry in the first set tuple */ + in.permutation_set_column_4, /* The second entry in the first set tuple */ + in.permutation_set_column_1, /* The first entry in the second set tuple */ + in.permutation_set_column_2); /* The second entry in the second set tuple */ + } + + /** + * @brief Get all the entities for the permutation when we need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!) + * + * @return All the entities needed for the permutation + */ + template static inline auto get_nonconst_entities(AllEntities& in) + { + return std::forward_as_tuple( + in.tuple_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_tuple_set_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_tuple_set_permutation, /* Enables adding first set to the sum */ + in.enable_tuple_set_permutation, /* Enables adding second set to the sum */ + in.permutation_set_column_3, /* The first entry in the first set tuple */ + in.permutation_set_column_4, /* The second entry in the first set tuple */ + in.permutation_set_column_1, /* The first entry in the second set tuple */ + in.permutation_set_column_2); /* The second entry in the second set tuple */ + } +}; + +/** + * @brief This class contains an example of how to set PermutationSettings classes used by the + * GenericPermutationRelationImpl class to specify a concrete permutation + * + * @details To create your own permutation: + * 1) Create a copy of this class and rename it + * 2) Update all the values with the ones needed for your permutation + * 3) Update "DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" and "DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS" to include the new + * settings + * 4) Add the relation with the chosen settings to Relations in the flavor (for example,"` + * using Relations = std::tuple>;)` + * + */ +class ExampleSameWirePermutationSettings { + public: + // This constant defines how many columns are bundled together to form each set.
For example, in this case we are + // permuting entries in the column with itself (self_permutation_column), so we choose just one + constexpr static size_t COLUMNS_PER_SET = 1; + + /** + * @brief If this method returns true on a row of values, then the inverse polynomial needs to be computed at this + * index. Otherwise the value needs to be set to zero. + * + * @details If this is true then permutation takes place in this row + * + */ + template static inline bool inverse_polynomial_is_computed_at_row(const AllEntities& in) + { + return (in.enable_single_column_permutation == 1); + } + + /** + * @brief Get all the entities for the permutation when we don't need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!) + * + * @return All the entities needed for the permutation + */ + template static inline auto get_const_entities(const AllEntities& in) + { + + return std::forward_as_tuple( + in.single_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_single_column_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_first_set_permutation, /* Enables adding first set to the sum */ + in.enable_second_set_permutation, /* Enables adding second set to the sum */ + in.self_permutation_column, /* The first set column */ + in.self_permutation_column /* The second set column which in this case is the same as the first set column + */ + ); + } + + /** + * @brief Get all the entities for the permutation when we need to update them + * + * @details The entities are returned as a tuple of references in the following order: + * - The entity/polynomial used to store the product of the inverse values + * - The entity/polynomial that switches on the subrelation of the permutation relation that ensures correctness of + * the inverse polynomial + * - The entity/polynomial that enables adding a tuple-generated value from the first set to the logderivative sum + * subrelation + * - The entity/polynomial that enables adding a tuple-generated value from the second set to the logderivative sum + * subrelation + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the first set (N.B. ORDER IS IMPORTANT!) + * - A sequence of COLUMNS_PER_SET entities/polynomials that represent the second set (N.B. ORDER IS IMPORTANT!)
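+ *
+ * (Editorial note, illustrative.) With enable_first_set_permutation set to (i & 1) and enable_second_set_permutation to 1 - (i & 1), as in the ToyAVM circuit builder above, the odd rows of self_permutation_column form the first set and the even rows the second, so the relation enforces that the same multiset of values appears on odd and even rows. A host-side analogue (assuming an ordering on FF):
+ *
+ *     bool odd_even_rows_match(const std::vector<FF>& column)
+ *     {
+ *         std::multiset<FF> odd_rows, even_rows; // requires <set> and <vector>
+ *         for (std::size_t i = 0; i < column.size(); ++i) {
+ *             ((i & 1) != 0 ? odd_rows : even_rows).insert(column[i]);
+ *         }
+ *         return odd_rows == even_rows;
+ *     }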
+ * + * @return All the entities needed for the permutation + */ + template static inline auto get_nonconst_entities(AllEntities& in) + { + return std::forward_as_tuple( + in.single_permutation_inverses, /* The polynomial containing the inverse product*/ + in.enable_single_column_permutation, /* The polynomial enabling the product check subrelation */ + in.enable_first_set_permutation, /* Enables adding first set to the sum */ + in.enable_second_set_permutation, /* Enables adding second set to the sum */ + in.self_permutation_column, /* The first set column */ + in.self_permutation_column /* The second set column which in this case is the same as the first set column + */ + ); + } +}; + +#define DEFINE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, Settings) \ + template class RelationImplementation; \ + template using RelationImplementation##Settings = RelationImplementation; \ + DEFINE_SUMCHECK_RELATION_CLASS(RelationImplementation##Settings, flavor); + +#define DEFINE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(RelationImplementation, flavor) \ + DEFINE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleTuplePermutationSettings); \ + DEFINE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleSameWirePermutationSettings); + +#define DECLARE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, Settings) \ + extern template class RelationImplementation; \ + template using RelationImplementation##Settings = RelationImplementation; \ + DECLARE_SUMCHECK_RELATION_CLASS(RelationImplementation##Settings, flavor); + +#define DECLARE_IMPLEMENTATIONS_FOR_ALL_SETTINGS(RelationImplementation, flavor) \ + DECLARE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleTuplePermutationSettings); \ + DECLARE_IMPLEMENTATIONS_FOR_SETTINGS(RelationImplementation, flavor, ExampleSameWirePermutationSettings); +} // namespace proof_system::honk::sumcheck \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt b/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt index 7ba574b2604..3b7a634c740 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/CMakeLists.txt @@ -1 +1 @@ -barretenberg_module(stdlib_recursion ecc proof_system stdlib_primitives stdlib_pedersen_commitment stdlib_blake3s ultra_honk) \ No newline at end of file +barretenberg_module(stdlib_recursion ecc proof_system stdlib_primitives stdlib_pedersen_commitment stdlib_blake3s ultra_honk eccvm translator_vm) \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/goblin/full_goblin_recursion.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/goblin/full_goblin_recursion.test.cpp new file mode 100644 index 00000000000..51d9f1943cb --- /dev/null +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/goblin/full_goblin_recursion.test.cpp @@ -0,0 +1,96 @@ +#include "barretenberg/eccvm/eccvm_composer.hpp" +#include "barretenberg/goblin/goblin.hpp" +#include "barretenberg/goblin/mock_circuits.hpp" +#include "barretenberg/goblin/translation_evaluations.hpp" +#include "barretenberg/proof_system/circuit_builder/eccvm/eccvm_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/goblin_ultra_circuit_builder.hpp" +#include "barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp" +#include "barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp" +#include 
"barretenberg/translator_vm/goblin_translator_composer.hpp" +#include "barretenberg/ultra_honk/ultra_composer.hpp" + +#include + +using namespace proof_system::honk; +namespace goblin_recursion_tests { + +class GoblinRecursionTests : public ::testing::Test { + protected: + static void SetUpTestSuite() + { + barretenberg::srs::init_crs_factory("../srs_db/ignition"); + barretenberg::srs::init_grumpkin_crs_factory("../srs_db/grumpkin"); + } + + using Curve = curve::BN254; + using FF = Curve::ScalarField; + using Fbase = Curve::BaseField; + using Point = Curve::AffineElement; + using CommitmentKey = pcs::CommitmentKey; + using OpQueue = proof_system::ECCOpQueue; + using GoblinUltraBuilder = proof_system::GoblinUltraCircuitBuilder; + using ECCVMFlavor = flavor::ECCVM; + using ECCVMBuilder = proof_system::ECCVMCircuitBuilder; + using ECCVMComposer = ECCVMComposer_; + using TranslatorFlavor = flavor::GoblinTranslator; + using TranslatorBuilder = proof_system::GoblinTranslatorCircuitBuilder; + using TranslatorComposer = GoblinTranslatorComposer; + using TranslatorConsistencyData = barretenberg::TranslationEvaluations; + using Proof = proof_system::plonk::proof; + using NativeVerificationKey = flavor::GoblinUltra::VerificationKey; + using RecursiveFlavor = flavor::GoblinUltraRecursive_; + using RecursiveVerifier = proof_system::plonk::stdlib::recursion::honk::UltraRecursiveVerifier_; + using KernelInput = Goblin::AccumulationOutput; + + /** + * @brief Construct a mock kernel circuit + * @details This circuit contains (1) some basic/arbitrary arithmetic gates, (2) a genuine recursive verification of + * the proof provided as input. It does not contain any other real kernel logic. + * + * @param builder + * @param kernel_input A proof to be recursively verified and the corresponding native verification key + */ + static void construct_mock_kernel_circuit(GoblinUltraBuilder& builder, KernelInput& kernel_input) + { + // Generic operations e.g. 
state updates (just arith gates for now) + GoblinTestingUtils::construct_arithmetic_circuit(builder); + + // Execute recursive aggregation of previous kernel proof + RecursiveVerifier verifier{ &builder, kernel_input.verification_key }; + // TODO(https://github.com/AztecProtocol/barretenberg/issues/801): Aggregation + auto pairing_points = verifier.verify_proof(kernel_input.proof); // app function proof + pairing_points = verifier.verify_proof(kernel_input.proof); // previous kernel proof + } +}; + +/** + * @brief A full Goblin test that mimics the basic Aztec client architecture + * + */ +TEST_F(GoblinRecursionTests, Pseudo) +{ + barretenberg::Goblin goblin; + + // Construct an initial circuit; its proof will be recursively verified by the first kernel + GoblinUltraBuilder initial_circuit{ goblin.op_queue }; + GoblinTestingUtils::construct_simple_initial_circuit(initial_circuit); + KernelInput kernel_input = goblin.accumulate(initial_circuit); + + // Construct a series of simple Goblin circuits; generate and verify their proofs + size_t NUM_CIRCUITS = 2; + for (size_t circuit_idx = 0; circuit_idx < NUM_CIRCUITS; ++circuit_idx) { + // Construct a circuit with logic resembling that of the "kernel circuit" + GoblinUltraBuilder circuit_builder{ goblin.op_queue }; + construct_mock_kernel_circuit(circuit_builder, kernel_input); + + // Construct proof of the current kernel circuit to be recursively verified by the next one + kernel_input = goblin.accumulate(circuit_builder); + } + + Goblin::Proof proof = goblin.prove(); + bool verified = goblin.verify(proof); + EXPECT_TRUE(verified); +} + +// TODO(https://github.com/AztecProtocol/barretenberg/issues/787) Expand these tests. +} // namespace goblin_recursion_tests diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/goblin_verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/goblin_verifier.test.cpp index 1c2da5ce7bc..8130ff26e85 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/goblin_verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/goblin_verifier.test.cpp @@ -10,7 +10,7 @@ namespace proof_system::plonk::stdlib::recursion::honk { /** - * @brief Test suite for recursive verification of conventional Ultra Honk proofs + * @brief Test suite for recursive verification of Goblin Ultra Honk proofs * @details The recursive verification circuit is arithmetized in two different ways: 1) using the conventional Ultra * arithmetization (UltraCircuitBuilder), or 2) a Goblin-style Ultra arithmetization (GoblinUltraCircuitBuilder). * @@ -18,12 +18,19 @@ namespace proof_system::plonk::stdlib::recursion::honk { */ template class GoblinRecursiveVerifierTest : public testing::Test { - // Define types relevant for inner circuit - using Flavor = ::proof_system::honk::flavor::GoblinUltra; - using InnerComposer = ::proof_system::honk::UltraComposer_; + // Define types relevant for testing + using UltraFlavor = ::proof_system::honk::flavor::Ultra; + using GoblinUltraFlavor = ::proof_system::honk::flavor::GoblinUltra; + using UltraComposer = ::proof_system::honk::UltraComposer_; + using GoblinUltraComposer = ::proof_system::honk::UltraComposer_; + + // Define types for the inner circuit, i.e.
the circuit whose proof will be recursively verified + using InnerFlavor = GoblinUltraFlavor; + using InnerComposer = GoblinUltraComposer; using InnerBuilder = typename InnerComposer::CircuitBuilder; - using NativeVerifier = ::proof_system::honk::UltraVerifier_<::proof_system::honk::flavor::Ultra>; using InnerCurve = bn254; + using InnerCommitment = InnerFlavor::Commitment; + using InnerFF = InnerFlavor::FF; // Types for recursive verifier circuit using RecursiveFlavor = ::proof_system::honk::flavor::GoblinUltraRecursive_; @@ -31,6 +38,16 @@ template class GoblinRecursiveVerifierTest : public testi using OuterBuilder = BuilderType; using VerificationKey = typename RecursiveVerifier::VerificationKey; + // Helper for getting composer for prover/verifier of recursive (outer) circuit + template static auto get_outer_composer() + { + if constexpr (IsGoblinBuilder) { + return GoblinUltraComposer(); + } else { + return UltraComposer(); + } + } + /** * @brief Create a non-trivial arbitrary inner circuit, the proof of which will be recursively verified * @@ -130,25 +147,25 @@ template class GoblinRecursiveVerifierTest : public testi { // Create an arbitrary inner circuit auto inner_circuit = create_inner_circuit(); + OuterBuilder outer_circuit; // Compute native verification key InnerComposer inner_composer; auto instance = inner_composer.create_instance(inner_circuit); auto prover = inner_composer.create_prover(instance); // A prerequisite for computing VK - // Instantiate the recursive verification key from the native verification key - OuterBuilder outer_circuit; - auto verification_key = std::make_shared(&outer_circuit, instance->verification_key); + // Instantiate the recursive verifier using the native verification key + RecursiveVerifier verifier{ &outer_circuit, instance->verification_key }; // Spot check some values in the recursive VK to ensure it was constructed correctly - EXPECT_EQ(verification_key->circuit_size, instance->verification_key->circuit_size); - EXPECT_EQ(verification_key->log_circuit_size, instance->verification_key->log_circuit_size); - EXPECT_EQ(verification_key->num_public_inputs, instance->verification_key->num_public_inputs); - EXPECT_EQ(verification_key->q_m.get_value(), instance->verification_key->q_m); - EXPECT_EQ(verification_key->q_r.get_value(), instance->verification_key->q_r); - EXPECT_EQ(verification_key->sigma_1.get_value(), instance->verification_key->sigma_1); - EXPECT_EQ(verification_key->id_3.get_value(), instance->verification_key->id_3); - EXPECT_EQ(verification_key->lagrange_ecc_op.get_value(), instance->verification_key->lagrange_ecc_op); + EXPECT_EQ(verifier.key->circuit_size, instance->verification_key->circuit_size); + EXPECT_EQ(verifier.key->log_circuit_size, instance->verification_key->log_circuit_size); + EXPECT_EQ(verifier.key->num_public_inputs, instance->verification_key->num_public_inputs); + EXPECT_EQ(verifier.key->q_m.get_value(), instance->verification_key->q_m); + EXPECT_EQ(verifier.key->q_r.get_value(), instance->verification_key->q_r); + EXPECT_EQ(verifier.key->sigma_1.get_value(), instance->verification_key->sigma_1); + EXPECT_EQ(verifier.key->id_3.get_value(), instance->verification_key->id_3); + EXPECT_EQ(verifier.key->lagrange_ecc_op.get_value(), instance->verification_key->lagrange_ecc_op); } /** @@ -168,15 +185,13 @@ template class GoblinRecursiveVerifierTest : public testi // Create a recursive verification circuit for the proof of the inner circuit OuterBuilder outer_circuit; - auto verification_key = 
std::make_shared(&outer_circuit, instance->verification_key); - RecursiveVerifier verifier(&outer_circuit, verification_key); + RecursiveVerifier verifier{ &outer_circuit, instance->verification_key }; auto pairing_points = verifier.verify_proof(inner_proof); - // Check the recursive verifier circuit + // Check for a failure flag in the recursive verifier circuit EXPECT_EQ(outer_circuit.failed(), false) << outer_circuit.err(); - EXPECT_TRUE(outer_circuit.check_circuit()); - // Additional check 1: Perform native verification then perform the pairing on the outputs of the recursive + // Check 1: Perform native verification then perform the pairing on the outputs of the recursive // verifier and check that the result agrees. auto native_verifier = inner_composer.create_verifier(instance); auto native_result = native_verifier.verify_proof(inner_proof); @@ -184,15 +199,25 @@ template class GoblinRecursiveVerifierTest : public testi pairing_points[1].get_value()); EXPECT_EQ(recursive_result, native_result); - // Additional check 2: Ensure that the underlying native and recursive verification algorithms agree by ensuring + // Check 2: Ensure that the underlying native and recursive verification algorithms agree by ensuring // the manifests produced by each agree. auto recursive_manifest = verifier.transcript->get_manifest(); auto native_manifest = native_verifier.transcript->get_manifest(); - // recursive_manifest.print(); - // native_manifest.print(); for (size_t i = 0; i < recursive_manifest.size(); ++i) { EXPECT_EQ(recursive_manifest[i], native_manifest[i]); } + + // Check 3: Construct and verify a proof of the recursive verifier circuit + { + auto composer = get_outer_composer(); + auto instance = composer.create_instance(outer_circuit); + auto prover = composer.create_prover(instance); + auto verifier = composer.create_verifier(instance); + auto proof = prover.construct_proof(); + bool verified = verifier.verify_proof(proof); + + ASSERT(verified); + } } /** @@ -214,14 +239,13 @@ template class GoblinRecursiveVerifierTest : public testi // Arbitrarily tamper with the proof to be verified inner_prover.transcript->deserialize_full_transcript(); - inner_prover.transcript->sorted_accum_comm = Flavor::Commitment::one() * Flavor::FF::random_element(); + inner_prover.transcript->sorted_accum_comm = InnerCommitment::one() * InnerFF::random_element(); inner_prover.transcript->serialize_full_transcript(); inner_proof = inner_prover.export_proof(); // Create a recursive verification circuit for the proof of the inner circuit OuterBuilder outer_circuit; - auto verification_key = std::make_shared(&outer_circuit, instance->verification_key); - RecursiveVerifier verifier(&outer_circuit, verification_key); + RecursiveVerifier verifier{ &outer_circuit, instance->verification_key }; verifier.verify_proof(inner_proof); // We expect the circuit check to fail due to the bad proof diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp index 1a67be711dd..6c303d15e25 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.cpp @@ -8,9 +8,9 @@ namespace proof_system::plonk::stdlib::recursion::honk { template -UltraRecursiveVerifier_::UltraRecursiveVerifier_(Builder* builder, - std::shared_ptr verifier_key) - : key(verifier_key) 
+UltraRecursiveVerifier_::UltraRecursiveVerifier_( + Builder* builder, const std::shared_ptr& native_verifier_key) + : key(std::make_shared(builder, native_verifier_key)) , builder(builder) {} diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp index 01e567c667d..62803746d5d 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/ultra_recursive_verifier.hpp @@ -14,11 +14,13 @@ template class UltraRecursiveVerifier_ { using Commitment = typename Flavor::Commitment; using GroupElement = typename Flavor::GroupElement; using VerificationKey = typename Flavor::VerificationKey; + using NativeVerificationKey = typename Flavor::NativeVerificationKey; using VerifierCommitmentKey = typename Flavor::VerifierCommitmentKey; using Builder = typename Flavor::CircuitBuilder; using PairingPoints = std::array; - explicit UltraRecursiveVerifier_(Builder* builder, std::shared_ptr verifier_key = nullptr); + explicit UltraRecursiveVerifier_(Builder* builder, + const std::shared_ptr& native_verifier_key); UltraRecursiveVerifier_(UltraRecursiveVerifier_&& other) = delete; UltraRecursiveVerifier_(const UltraRecursiveVerifier_& other) = delete; UltraRecursiveVerifier_& operator=(const UltraRecursiveVerifier_& other) = delete; diff --git a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/verifier.test.cpp b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/verifier.test.cpp index ddb345b972b..3b045a35a73 100644 --- a/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/verifier.test.cpp +++ b/barretenberg/cpp/src/barretenberg/stdlib/recursion/honk/verifier/verifier.test.cpp @@ -18,12 +18,18 @@ namespace proof_system::plonk::stdlib::recursion::honk { */ template class RecursiveVerifierTest : public testing::Test { - // Define types relevant for inner circuit - using Flavor = ::proof_system::honk::flavor::Ultra; - using InnerComposer = ::proof_system::honk::UltraComposer_; + // Define types relevant for testing + using UltraFlavor = ::proof_system::honk::flavor::Ultra; + using GoblinUltraFlavor = ::proof_system::honk::flavor::GoblinUltra; + using UltraComposer = ::proof_system::honk::UltraComposer_; + using GoblinUltraComposer = ::proof_system::honk::UltraComposer_; + + using InnerFlavor = UltraFlavor; + using InnerComposer = UltraComposer; using InnerBuilder = typename InnerComposer::CircuitBuilder; - using NativeVerifier = ::proof_system::honk::UltraVerifier_<::proof_system::honk::flavor::Ultra>; using InnerCurve = bn254; + using Commitment = InnerFlavor::Commitment; + using FF = InnerFlavor::FF; // Types for recursive verifier circuit using RecursiveFlavor = ::proof_system::honk::flavor::UltraRecursive_; @@ -31,6 +37,16 @@ template class RecursiveVerifierTest : public testing::Te using OuterBuilder = BuilderType; using VerificationKey = typename RecursiveVerifier::VerificationKey; + // Helper for getting composer for prover/verifier of recursive (outer) circuit + template static auto get_outer_composer() + { + if constexpr (IsGoblinBuilder) { + return GoblinUltraComposer(); + } else { + return UltraComposer(); + } + } + /** * @brief Create a non-trivial arbitrary inner circuit, the proof of which will be recursively verified * @@ -121,17 +137,17 @@ template class RecursiveVerifierTest : public 
testing::Te auto instance = inner_composer.create_instance(inner_circuit); auto prover = inner_composer.create_prover(instance); // A prerequisite for computing VK - // Instantiate the recursive verification key from the native verification key - auto verification_key = std::make_shared(&outer_circuit, instance->verification_key); + // Instantiate the recursive verifier using the native verification key + RecursiveVerifier verifier{ &outer_circuit, instance->verification_key }; // Spot check some values in the recursive VK to ensure it was constructed correctly - EXPECT_EQ(verification_key->circuit_size, instance->verification_key->circuit_size); - EXPECT_EQ(verification_key->log_circuit_size, instance->verification_key->log_circuit_size); - EXPECT_EQ(verification_key->num_public_inputs, instance->verification_key->num_public_inputs); - EXPECT_EQ(verification_key->q_m.get_value(), instance->verification_key->q_m); - EXPECT_EQ(verification_key->q_r.get_value(), instance->verification_key->q_r); - EXPECT_EQ(verification_key->sigma_1.get_value(), instance->verification_key->sigma_1); - EXPECT_EQ(verification_key->id_3.get_value(), instance->verification_key->id_3); + EXPECT_EQ(verifier.key->circuit_size, instance->verification_key->circuit_size); + EXPECT_EQ(verifier.key->log_circuit_size, instance->verification_key->log_circuit_size); + EXPECT_EQ(verifier.key->num_public_inputs, instance->verification_key->num_public_inputs); + EXPECT_EQ(verifier.key->q_m.get_value(), instance->verification_key->q_m); + EXPECT_EQ(verifier.key->q_r.get_value(), instance->verification_key->q_r); + EXPECT_EQ(verifier.key->sigma_1.get_value(), instance->verification_key->sigma_1); + EXPECT_EQ(verifier.key->id_3.get_value(), instance->verification_key->id_3); } /** @@ -152,15 +168,13 @@ template class RecursiveVerifierTest : public testing::Te // Create a recursive verification circuit for the proof of the inner circuit OuterBuilder outer_circuit; - auto verification_key = std::make_shared(&outer_circuit, instance->verification_key); - RecursiveVerifier verifier(&outer_circuit, verification_key); + RecursiveVerifier verifier{ &outer_circuit, instance->verification_key }; auto pairing_points = verifier.verify_proof(inner_proof); - // Check the recursive verifier circuit + // Check for a failure flag in the recursive verifier circuit EXPECT_EQ(outer_circuit.failed(), false) << outer_circuit.err(); - EXPECT_TRUE(outer_circuit.check_circuit()); - // Additional check 1: Perform native verification then perform the pairing on the outputs of the recursive + // Check 1: Perform native verification then perform the pairing on the outputs of the recursive // verifier and check that the result agrees. auto native_verifier = inner_composer.create_verifier(instance); auto native_result = native_verifier.verify_proof(inner_proof); @@ -168,15 +182,25 @@ template class RecursiveVerifierTest : public testing::Te pairing_points[1].get_value()); EXPECT_EQ(recursive_result, native_result); - // Additional check 2: Ensure that the underlying native and recursive verification algorithms agree by ensuring + // Check 2: Ensure that the underlying native and recursive verification algorithms agree by ensuring // the manifests produced by each agree. 
auto recursive_manifest = verifier.transcript->get_manifest(); auto native_manifest = native_verifier.transcript->get_manifest(); - // recursive_manifest.print(); - // native_manifest.print(); for (size_t i = 0; i < recursive_manifest.size(); ++i) { EXPECT_EQ(recursive_manifest[i], native_manifest[i]); } + + // Check 3: Construct and verify a proof of the recursive verifier circuit + { + auto composer = get_outer_composer(); + auto instance = composer.create_instance(outer_circuit); + auto prover = composer.create_prover(instance); + auto verifier = composer.create_verifier(instance); + auto proof = prover.construct_proof(); + bool verified = verifier.verify_proof(proof); + + ASSERT(verified); + } } /** @@ -199,14 +223,13 @@ template class RecursiveVerifierTest : public testing::Te // Arbitrarily tamper with the proof to be verified inner_prover.transcript->deserialize_full_transcript(); - inner_prover.transcript->sorted_accum_comm = Flavor::Commitment::one() * Flavor::FF::random_element(); + inner_prover.transcript->sorted_accum_comm = Commitment::one() * FF::random_element(); inner_prover.transcript->serialize_full_transcript(); inner_proof = inner_prover.export_proof(); // Create a recursive verification circuit for the proof of the inner circuit OuterBuilder outer_circuit; - auto verification_key = std::make_shared(&outer_circuit, instance->verification_key); - RecursiveVerifier verifier(&outer_circuit, verification_key); + RecursiveVerifier verifier{ &outer_circuit, instance->verification_key }; verifier.verify_proof(inner_proof); // We expect the circuit check to fail due to the bad proof diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp index 41924b71875..8e1a6ede949 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp +++ b/barretenberg/cpp/src/barretenberg/sumcheck/instance/prover_instance.cpp @@ -1,5 +1,5 @@ #include "prover_instance.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/proof_system/circuit_builder/ultra_circuit_builder.hpp" #include "barretenberg/proof_system/composer/permutation_lib.hpp" #include "barretenberg/proof_system/library/grand_product_delta.hpp" @@ -330,6 +330,8 @@ template void ProverInstance_::initialize_prover_polynomi prover_polynomials.lookup_inverses = proving_key->lookup_inverses; prover_polynomials.q_busread = proving_key->q_busread; prover_polynomials.databus_id = proving_key->databus_id; + prover_polynomials.q_poseidon2_external = proving_key->q_poseidon2_external; + prover_polynomials.q_poseidon2_internal = proving_key->q_poseidon2_internal; } // These polynomials have not yet been computed; initialize them so prover_polynomials is "full" and we can use @@ -451,7 +453,7 @@ void ProverInstance_::compute_logderivative_inverse(FF beta, FF gamma) relation_parameters.gamma = gamma; // Compute permutation and lookup grand product polynomials - lookup_library::compute_logderivative_inverse( + logderivative_library::compute_logderivative_inverse( prover_polynomials, relation_parameters, proving_key->circuit_size); } diff --git a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp index b5c366be750..7ec78a73799 100644 --- a/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp +++ 
b/barretenberg/cpp/src/barretenberg/sumcheck/sumcheck_round.hpp @@ -123,13 +123,13 @@ template class SumcheckProverRound { barretenberg::thread_utils::calculate_num_threads_pow2(round_size, min_iterations_per_thread); size_t iterations_per_thread = round_size / num_threads; // actual iterations per thread - // Constuct univariate accumulator containers; one per thread + // Construct univariate accumulator containers; one per thread std::vector thread_univariate_accumulators(num_threads); for (auto& accum : thread_univariate_accumulators) { Utils::zero_univariates(accum); } - // Constuct extended edge containers; one per thread + // Construct extended edge containers; one per thread std::vector extended_edges; extended_edges.resize(num_threads); diff --git a/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp b/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp index 018001ec101..e734829460c 100644 --- a/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp +++ b/barretenberg/cpp/src/barretenberg/transcript/transcript.hpp @@ -4,8 +4,16 @@ #include "barretenberg/crypto/blake3s/blake3s.hpp" #include "barretenberg/crypto/pedersen_hash/pedersen.hpp" +// #define LOG_CHALLENGES +// #define LOG_INTERACTIONS + namespace proof_system::honk { +template +concept Loggable = (std::same_as || std::same_as || + std::same_as || + std::same_as || std::same_as); + // class TranscriptManifest; class TranscriptManifest { struct RoundData { @@ -268,6 +276,11 @@ class BaseTranscript { auto element_bytes = to_buffer(element); proof_data.insert(proof_data.end(), element_bytes.begin(), element_bytes.end()); +#ifdef LOG_INTERACTIONS + if constexpr (Loggable) { + info("sent: ", label, ": ", element); + } +#endif BaseTranscript::consume_prover_element_bytes(label, element_bytes); } @@ -289,6 +302,11 @@ class BaseTranscript { T element = from_buffer(element_bytes); +#ifdef LOG_INTERACTIONS + if constexpr (Loggable) { + info("received: ", label, ": ", element); + } +#endif return element; } @@ -320,7 +338,14 @@ class BaseTranscript { return verifier_transcript; }; - uint256_t get_challenge(const std::string& label) { return get_challenges(label)[0]; } + uint256_t get_challenge(const std::string& label) + { + uint256_t result = get_challenges(label)[0]; +#if defined LOG_CHALLENGES || defined LOG_INTERACTIONS + info("challenge: ", label, ": ", result); +#endif + return result; + } [[nodiscard]] TranscriptManifest get_manifest() const { return manifest; }; diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp index ed7c1274d5e..d186ebc6eb7 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_composer.test.cpp @@ -9,6 +9,9 @@ #include using namespace proof_system::honk; +using CircuitBuilder = flavor::GoblinTranslator::CircuitBuilder; +using Transcript = flavor::GoblinTranslator::Transcript; +using OpQueue = proof_system::ECCOpQueue; namespace test_goblin_translator_composer { @@ -25,15 +28,6 @@ std::vector add_variables(auto& circuit_constructor, std::vector(); @@ -68,14 +62,24 @@ TEST_F(GoblinTranslatorComposerTests, Basic) op_queue->add_accumulate(P1); op_queue->mul_accumulate(P2, z); } - Fq batching_challenge = Fq::random_element(); - Fq x = Fq::random_element(); - auto circuit_builder = proof_system::GoblinTranslatorCircuitBuilder(batching_challenge, x); - 
circuit_builder.feed_ecc_op_queue_into_circuit(op_queue); + + auto prover_transcript = std::make_shared(); + prover_transcript->send_to_verifier("init", Fq::random_element()); + prover_transcript->export_proof(); + Fq translation_batching_challenge = prover_transcript->get_challenge("Translation:batching_challenge"); + Fq translation_evaluation_challenge = Fq::random_element(); + auto circuit_builder = CircuitBuilder(translation_batching_challenge, translation_evaluation_challenge, op_queue); EXPECT_TRUE(circuit_builder.check_circuit()); auto composer = GoblinTranslatorComposer(); - prove_and_verify(circuit_builder, composer, /*expected_result=*/true); + auto prover = composer.create_prover(circuit_builder, prover_transcript); + auto proof = prover.construct_proof(); + + auto verifier_transcript = std::make_shared(prover_transcript->proof_data); + verifier_transcript->template receive_from_prover("init"); + auto verifier = composer.create_verifier(circuit_builder, verifier_transcript); + bool verified = verifier.verify_proof(proof); + EXPECT_TRUE(verified); } } // namespace test_goblin_translator_composer diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp index cd3e010b0d9..aeaa9a2719a 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.cpp @@ -256,13 +256,12 @@ void GoblinTranslatorProver::execute_preamble_round() const auto SHIFT = uint256_t(1) << Flavor::NUM_LIMB_BITS; const auto SHIFTx2 = uint256_t(1) << (Flavor::NUM_LIMB_BITS * 2); const auto SHIFTx3 = uint256_t(1) << (Flavor::NUM_LIMB_BITS * 3); - const auto accumulated_result = typename Flavor::BF(uint256_t(key->accumulators_binary_limbs_0[1]) + - uint256_t(key->accumulators_binary_limbs_1[1]) * SHIFT + - uint256_t(key->accumulators_binary_limbs_2[1]) * SHIFTx2 + - uint256_t(key->accumulators_binary_limbs_3[1]) * SHIFTx3); + const auto accumulated_result = + BF(uint256_t(key->accumulators_binary_limbs_0[1]) + uint256_t(key->accumulators_binary_limbs_1[1]) * SHIFT + + uint256_t(key->accumulators_binary_limbs_2[1]) * SHIFTx2 + + uint256_t(key->accumulators_binary_limbs_3[1]) * SHIFTx3); transcript->send_to_verifier("circuit_size", circuit_size); transcript->send_to_verifier("evaluation_input_x", key->evaluation_input_x); - transcript->send_to_verifier("batching_challenge_v", key->batching_challenge_v); transcript->send_to_verifier("accumulated_result", accumulated_result); } @@ -366,7 +365,7 @@ void GoblinTranslatorProver::execute_zeromorph_rounds() plonk::proof& GoblinTranslatorProver::export_proof() { - proof.proof_data = transcript->proof_data; + proof.proof_data = transcript->export_proof(); return proof; } diff --git a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp index 3a2db24407d..99bf48c490d 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_prover.hpp @@ -12,6 +12,7 @@ class GoblinTranslatorProver { using Flavor = honk::flavor::GoblinTranslator; using FF = typename Flavor::FF; + using BF = typename Flavor::BF; using Commitment = typename Flavor::Commitment; using CommitmentKey = typename Flavor::CommitmentKey; using ProvingKey = typename Flavor::ProvingKey; diff --git 
a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp index 089c2c37451..44e736b71d5 100644 --- a/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/translator_vm/goblin_translator_verifier.cpp @@ -68,9 +68,8 @@ void GoblinTranslatorVerifier::put_translation_data_in_relation_parameters(const */ bool GoblinTranslatorVerifier::verify_proof(const plonk::proof& proof) { - transcript = std::make_shared(proof.proof_data); - - transcript = std::make_shared(proof.proof_data); + batching_challenge_v = transcript->get_challenge("Translation:batching_challenge"); + transcript->load_proof(proof.proof_data); Flavor::VerifierCommitments commitments{ key }; Flavor::CommitmentLabels commitment_labels; @@ -78,7 +77,6 @@ bool GoblinTranslatorVerifier::verify_proof(const plonk::proof& proof) // TODO(Adrian): Change the initialization of the transcript to take the VK hash? const auto circuit_size = transcript->template receive_from_prover("circuit_size"); evaluation_input_x = transcript->template receive_from_prover("evaluation_input_x"); - batching_challenge_v = transcript->template receive_from_prover("batching_challenge_v"); const BF accumulated_result = transcript->template receive_from_prover("accumulated_result"); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp index a27b2091f3a..671634a3073 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_prover.cpp @@ -94,7 +94,7 @@ template plonk::proof& MergeProver_::construct_proof() FF alpha = transcript->get_challenge("alpha"); - // Constuct batched polynomial to opened via KZG + // Construct batched polynomial to be opened via KZG auto batched_polynomial = Polynomial(N); auto batched_eval = FF(0); auto alpha_pow = FF(1); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp index 1d913b342fb..c93257af7de 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/merge_verifier.cpp @@ -62,7 +62,7 @@ template bool MergeVerifier_::verify_proof(const plonk FF alpha = transcript->get_challenge("alpha"); - // Constuct batched commitment and evaluation from constituents + // Construct batched commitment and evaluation from constituents auto batched_commitment = opening_claims[0].commitment; auto batched_eval = opening_claims[0].opening_pair.evaluation; auto alpha_pow = alpha; diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp index 67848bfecd4..6dfcbe4c4eb 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/relation_correctness.test.cpp @@ -343,6 +343,8 @@ TEST_F(RelationCorrectnessTests, GoblinUltraRelationCorrectness) ensure_non_zero(proving_key->q_elliptic); ensure_non_zero(proving_key->q_aux); ensure_non_zero(proving_key->q_busread); + ensure_non_zero(proving_key->q_poseidon2_external); + ensure_non_zero(proving_key->q_poseidon2_internal); ensure_non_zero(proving_key->calldata); ensure_non_zero(proving_key->calldata_read_counts); diff --git
a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.cpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.cpp index 50f34044c29..7f318aa98a3 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.cpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_composer.cpp @@ -57,6 +57,8 @@ void UltraComposer_::compute_verification_key(const std::shared_ptrlagrange_ecc_op = commitment_key->commit(proving_key->lagrange_ecc_op); verification_key->q_busread = commitment_key->commit(proving_key->q_busread); verification_key->databus_id = commitment_key->commit(proving_key->databus_id); + verification_key->q_poseidon2_external = commitment_key->commit(proving_key->q_poseidon2_external); + verification_key->q_poseidon2_internal = commitment_key->commit(proving_key->q_poseidon2_internal); } instance->verification_key = std::move(verification_key); diff --git a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp index fb81addb7cc..dece05e520c 100644 --- a/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp +++ b/barretenberg/cpp/src/barretenberg/ultra_honk/ultra_prover.hpp @@ -42,9 +42,6 @@ template class UltraProver_ { std::shared_ptr transcript; - std::vector public_inputs; - size_t pub_inputs_offset; - proof_system::RelationParameters relation_parameters; CommitmentLabels commitment_labels; diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp index 913dc121988..b7daa36ddb3 100644 --- a/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp +++ b/barretenberg/cpp/src/barretenberg/vm/generated/AvmMini_prover.cpp @@ -3,7 +3,7 @@ #include "AvmMini_prover.hpp" #include "barretenberg/commitment_schemes/claim.hpp" #include "barretenberg/commitment_schemes/commitment_key.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" +#include "barretenberg/honk/proof_system/logderivative_library.hpp" #include "barretenberg/honk/proof_system/permutation_library.hpp" #include "barretenberg/honk/proof_system/power_polynomial.hpp" #include "barretenberg/polynomials/polynomial.hpp" diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.cpp deleted file mode 100644 index 7a78c264e75..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.cpp +++ /dev/null @@ -1,85 +0,0 @@ - - -#include "./Fib_composer.hpp" -#include "barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp" -#include "barretenberg/proof_system/composer/composer_lib.hpp" -#include "barretenberg/proof_system/composer/permutation_lib.hpp" -#include "barretenberg/vm/generated/Fib_verifier.hpp" - -namespace proof_system::honk { - -using Flavor = honk::flavor::FibFlavor; -void FibComposer::compute_witness(CircuitConstructor& circuit) -{ - if (computed_witness) { - return; - } - - auto polynomials = circuit.compute_polynomials(); - - proving_key->Fibonacci_LAST = polynomials.Fibonacci_LAST; - proving_key->Fibonacci_FIRST = polynomials.Fibonacci_FIRST; - proving_key->Fibonacci_x = polynomials.Fibonacci_x; - proving_key->Fibonacci_y = polynomials.Fibonacci_y; - - computed_witness = true; -} - -FibProver FibComposer::create_prover(CircuitConstructor& circuit_constructor) -{ - compute_proving_key(circuit_constructor); - compute_witness(circuit_constructor); - 
compute_commitment_key(circuit_constructor.get_circuit_subgroup_size()); - - FibProver output_state(proving_key, commitment_key); - - return output_state; -} - -FibVerifier FibComposer::create_verifier(CircuitConstructor& circuit_constructor) -{ - auto verification_key = compute_verification_key(circuit_constructor); - - FibVerifier output_state(verification_key); - - auto pcs_verification_key = std::make_unique(verification_key->circuit_size, crs_factory_); - - output_state.pcs_verification_key = std::move(pcs_verification_key); - - return output_state; -} - -std::shared_ptr FibComposer::compute_proving_key(CircuitConstructor& circuit_constructor) -{ - if (proving_key) { - return proving_key; - } - - // Initialize proving_key - { - const size_t subgroup_size = circuit_constructor.get_circuit_subgroup_size(); - proving_key = std::make_shared(subgroup_size, 0); - } - - proving_key->contains_recursive_proof = false; - - return proving_key; -} - -std::shared_ptr FibComposer::compute_verification_key(CircuitConstructor& circuit_constructor) -{ - if (verification_key) { - return verification_key; - } - - if (!proving_key) { - compute_proving_key(circuit_constructor); - } - - verification_key = - std::make_shared(proving_key->circuit_size, proving_key->num_public_inputs); - - return verification_key; -} - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.hpp deleted file mode 100644 index 99c71c1913f..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.hpp +++ /dev/null @@ -1,69 +0,0 @@ - - -#pragma once - -#include "barretenberg/proof_system/circuit_builder/generated/Fib_circuit_builder.hpp" -#include "barretenberg/proof_system/composer/composer_lib.hpp" -#include "barretenberg/srs/global_crs.hpp" -#include "barretenberg/vm/generated/Fib_prover.hpp" -#include "barretenberg/vm/generated/Fib_verifier.hpp" - -namespace proof_system::honk { -class FibComposer { - public: - using Flavor = honk::flavor::FibFlavor; - using CircuitConstructor = FibCircuitBuilder; - using ProvingKey = Flavor::ProvingKey; - using VerificationKey = Flavor::VerificationKey; - using PCS = Flavor::PCS; - using CommitmentKey = Flavor::CommitmentKey; - using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; - - // TODO: which of these will we really need - static constexpr std::string_view NAME_STRING = "Fib"; - static constexpr size_t NUM_RESERVED_GATES = 0; - static constexpr size_t NUM_WIRES = Flavor::NUM_WIRES; - - std::shared_ptr proving_key; - std::shared_ptr verification_key; - - // The crs_factory holds the path to the srs and exposes methods to extract the srs elements - std::shared_ptr> crs_factory_; - - // The commitment key is passed to the prover but also used herein to compute the verfication key commitments - std::shared_ptr commitment_key; - - std::vector recursive_proof_public_input_indices; - bool contains_recursive_proof = false; - bool computed_witness = false; - - FibComposer() { crs_factory_ = barretenberg::srs::get_crs_factory(); } - - FibComposer(std::shared_ptr p_key, std::shared_ptr v_key) - : proving_key(std::move(p_key)) - , verification_key(std::move(v_key)) - {} - - FibComposer(FibComposer&& other) noexcept = default; - FibComposer(FibComposer const& other) noexcept = default; - FibComposer& operator=(FibComposer&& other) noexcept = default; - FibComposer& operator=(FibComposer const& other) noexcept = default; - ~FibComposer() = default; - - 
std::shared_ptr compute_proving_key(CircuitConstructor& circuit_constructor); - std::shared_ptr compute_verification_key(CircuitConstructor& circuit_constructor); - - void compute_witness(CircuitConstructor& circuit_constructor); - - FibProver create_prover(CircuitConstructor& circuit_constructor); - FibVerifier create_verifier(CircuitConstructor& circuit_constructor); - - void add_table_column_selector_poly_to_proving_key(barretenberg::polynomial& small, const std::string& tag); - - void compute_commitment_key(size_t circuit_size) - { - commitment_key = std::make_shared(circuit_size, crs_factory_); - }; -}; - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.test.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.test.cpp deleted file mode 100644 index 36650f8ce61..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_composer.test.cpp +++ /dev/null @@ -1,51 +0,0 @@ -#include "barretenberg/vm/generated/Fib_composer.hpp" -#include "barretenberg/ecc/curves/bn254/fr.hpp" -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/proof_system/circuit_builder/generated/Fib_trace.hpp" -#include "barretenberg/proof_system/plookup_tables/types.hpp" -#include "barretenberg/sumcheck/sumcheck_round.hpp" -#include "barretenberg/vm/generated/Fib_prover.hpp" -#include "barretenberg/vm/generated/Fib_verifier.hpp" -#include -#include -#include -#include -#include - -using namespace proof_system::honk; - -namespace example_relation_honk_composer { - -class FibTests : public ::testing::Test { - protected: - // TODO(640): The Standard Honk on Grumpkin test suite fails unless the SRS is initialised for every test. - void SetUp() override { barretenberg::srs::init_crs_factory("../srs_db/ignition"); }; -}; - -namespace { -auto& engine = numeric::random::get_debug_engine(); -} - -TEST_F(FibTests, powdre2e) -{ - barretenberg::srs::init_crs_factory("../srs_db/ignition"); - - auto circuit_builder = proof_system::FibCircuitBuilder(); - - auto rows = proof_system::FibTraceBuilder::build_trace(); - circuit_builder.set_trace(std::move(rows)); - - auto composer = FibComposer(); - - bool circuit_gud = circuit_builder.check_circuit(); - ASSERT_TRUE(circuit_gud); - - auto prover = composer.create_prover(circuit_builder); - auto proof = prover.construct_proof(); - - auto verifier = composer.create_verifier(circuit_builder); - bool verified = verifier.verify_proof(proof); - ASSERT_TRUE(verified) << proof; -} - -} // namespace example_relation_honk_composer \ No newline at end of file diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.cpp deleted file mode 100644 index b8cd3fe8907..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.cpp +++ /dev/null @@ -1,136 +0,0 @@ - - -#include "Fib_prover.hpp" -#include "barretenberg/commitment_schemes/claim.hpp" -#include "barretenberg/commitment_schemes/commitment_key.hpp" -#include "barretenberg/honk/proof_system/lookup_library.hpp" -#include "barretenberg/honk/proof_system/permutation_library.hpp" -#include "barretenberg/honk/proof_system/power_polynomial.hpp" -#include "barretenberg/polynomials/polynomial.hpp" -#include "barretenberg/proof_system/library/grand_product_library.hpp" -#include "barretenberg/relations/lookup_relation.hpp" -#include "barretenberg/relations/permutation_relation.hpp" -#include "barretenberg/sumcheck/sumcheck.hpp" - -namespace 
proof_system::honk { - -using Flavor = honk::flavor::FibFlavor; - -/** - * Create FibProver from proving key, witness and manifest. - * - * @param input_key Proving key. - * @param input_manifest Input manifest - * - * @tparam settings Settings class. - * */ -FibProver::FibProver(std::shared_ptr input_key, std::shared_ptr commitment_key) - : key(input_key) - , commitment_key(commitment_key) -{ - // TODO: take every polynomial and assign it to the key!! - - prover_polynomials.Fibonacci_LAST = key->Fibonacci_LAST; - prover_polynomials.Fibonacci_FIRST = key->Fibonacci_FIRST; - prover_polynomials.Fibonacci_x = key->Fibonacci_x; - prover_polynomials.Fibonacci_y = key->Fibonacci_y; - - prover_polynomials.Fibonacci_x = key->Fibonacci_x; - prover_polynomials.Fibonacci_x_shift = key->Fibonacci_x.shifted(); - - prover_polynomials.Fibonacci_y = key->Fibonacci_y; - prover_polynomials.Fibonacci_y_shift = key->Fibonacci_y.shifted(); - - // prover_polynomials.lookup_inverses = key->lookup_inverses; - // key->z_perm = Polynomial(key->circuit_size); - // prover_polynomials.z_perm = key->z_perm; -} - -/** - * @brief Add circuit size, public input size, and public inputs to transcript - * - */ -void FibProver::execute_preamble_round() -{ - const auto circuit_size = static_cast(key->circuit_size); - - transcript->send_to_verifier("circuit_size", circuit_size); -} - -/** - * @brief Compute commitments to the first three wires - * - */ -void FibProver::execute_wire_commitments_round() -{ - auto wire_polys = key->get_wires(); - auto labels = commitment_labels.get_wires(); - for (size_t idx = 0; idx < wire_polys.size(); ++idx) { - transcript->send_to_verifier(labels[idx], commitment_key->commit(wire_polys[idx])); - } -} - -/** - * @brief Run Sumcheck resulting in u = (u_1,...,u_d) challenges and all evaluations at u being calculated. - * - */ -void FibProver::execute_relation_check_rounds() -{ - using Sumcheck = sumcheck::SumcheckProver; - - auto sumcheck = Sumcheck(key->circuit_size, transcript); - auto alpha = transcript->get_challenge("alpha"); - - sumcheck_output = sumcheck.prove(prover_polynomials, relation_parameters, alpha); -} - -/** - * @brief Execute the ZeroMorph protocol to prove the multilinear evaluations produced by Sumcheck - * @details See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the unrolled protocol. - * - * */ -void FibProver::execute_zeromorph_rounds() -{ - ZeroMorph::prove(prover_polynomials.get_unshifted(), - prover_polynomials.get_to_be_shifted(), - sumcheck_output.claimed_evaluations.get_unshifted(), - sumcheck_output.claimed_evaluations.get_shifted(), - sumcheck_output.challenge, - commitment_key, - transcript); -} - -plonk::proof& FibProver::export_proof() -{ - proof.proof_data = transcript->proof_data; - return proof; -} - -plonk::proof& FibProver::construct_proof() -{ - // Add circuit size public input size and public inputs to transcript-> - execute_preamble_round(); - - // Compute wire commitments - execute_wire_commitments_round(); - - // TODO: not implemented for codegen just yet - // Compute sorted list accumulator and commitment - // execute_log_derivative_commitments_round(); - - // Fiat-Shamir: bbeta & gamma - // Compute grand product(s) and commitments. - // execute_grand_product_computation_round(); - - // Fiat-Shamir: alpha - // Run sumcheck subprotocol. 
- execute_relation_check_rounds(); - - // Fiat-Shamir: rho, y, x, z - // Execute Zeromorph multilinear PCS - execute_zeromorph_rounds(); - - return export_proof(); -} - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.hpp deleted file mode 100644 index 7b9e3cc6862..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_prover.hpp +++ /dev/null @@ -1,62 +0,0 @@ - - -#pragma once -#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/plonk/proof_system/types/proof.hpp" -#include "barretenberg/relations/relation_parameters.hpp" -#include "barretenberg/sumcheck/sumcheck_output.hpp" -#include "barretenberg/transcript/transcript.hpp" - -namespace proof_system::honk { - -class FibProver { - - using Flavor = honk::flavor::FibFlavor; - using FF = Flavor::FF; - using PCS = Flavor::PCS; - using PCSCommitmentKey = Flavor::CommitmentKey; - using ProvingKey = Flavor::ProvingKey; - using Polynomial = Flavor::Polynomial; - using ProverPolynomials = Flavor::ProverPolynomials; - using CommitmentLabels = Flavor::CommitmentLabels; - using Curve = Flavor::Curve; - using Transcript = Flavor::Transcript; - - public: - explicit FibProver(std::shared_ptr input_key, std::shared_ptr commitment_key); - - void execute_preamble_round(); - void execute_wire_commitments_round(); - void execute_relation_check_rounds(); - void execute_zeromorph_rounds(); - - plonk::proof& export_proof(); - plonk::proof& construct_proof(); - - std::shared_ptr transcript = std::make_shared(); - - std::vector public_inputs; - - proof_system::RelationParameters relation_parameters; - - std::shared_ptr key; - - // Container for spans of all polynomials required by the prover (i.e. all multivariates evaluated by Sumcheck). - ProverPolynomials prover_polynomials; - - CommitmentLabels commitment_labels; - - Polynomial quotient_W; - - sumcheck::SumcheckOutput sumcheck_output; - - std::shared_ptr commitment_key; - - using ZeroMorph = pcs::zeromorph::ZeroMorphProver_; - - private: - plonk::proof proof; -}; - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.cpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.cpp deleted file mode 100644 index a47e2c0fdf0..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.cpp +++ /dev/null @@ -1,89 +0,0 @@ - - -#include "./Fib_verifier.hpp" -#include "barretenberg/commitment_schemes/zeromorph/zeromorph.hpp" -#include "barretenberg/honk/proof_system/power_polynomial.hpp" -#include "barretenberg/numeric/bitop/get_msb.hpp" -#include "barretenberg/transcript/transcript.hpp" - -using namespace barretenberg; -using namespace proof_system::honk::sumcheck; - -namespace proof_system::honk { -FibVerifier::FibVerifier(std::shared_ptr verifier_key) - : key(verifier_key) -{} - -FibVerifier::FibVerifier(FibVerifier&& other) noexcept - : key(std::move(other.key)) - , pcs_verification_key(std::move(other.pcs_verification_key)) -{} - -FibVerifier& FibVerifier::operator=(FibVerifier&& other) noexcept -{ - key = other.key; - pcs_verification_key = (std::move(other.pcs_verification_key)); - commitments.clear(); - return *this; -} - -/** - * @brief This function verifies an Fib Honk proof for given program settings. 
- * - */ -bool FibVerifier::verify_proof(const plonk::proof& proof) -{ - using Flavor = honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Commitment = Flavor::Commitment; - // using Curve = Flavor::Curve; - // using ZeroMorph = pcs::zeromorph::ZeroMorphVerifier_; - using VerifierCommitments = Flavor::VerifierCommitments; - using CommitmentLabels = Flavor::CommitmentLabels; - using Transcript = Flavor::Transcript; - - RelationParameters relation_parameters; - - transcript = std::make_shared(proof.proof_data); - - VerifierCommitments commitments{ key }; - CommitmentLabels commitment_labels; - - const auto circuit_size = transcript->template receive_from_prover("circuit_size"); - - if (circuit_size != key->circuit_size) { - return false; - } - - // Get commitments to VM wires - commitments.Fibonacci_x = transcript->template receive_from_prover(commitment_labels.Fibonacci_x); - commitments.Fibonacci_y = transcript->template receive_from_prover(commitment_labels.Fibonacci_y); - - // Execute Sumcheck Verifier - auto sumcheck = SumcheckVerifier(circuit_size); - - auto alpha = transcript->get_challenge("alpha"); - auto [multivariate_challenge, claimed_evaluations, sumcheck_verified] = - sumcheck.verify(relation_parameters, alpha, transcript); - - // If Sumcheck did not verify, return false - if (sumcheck_verified.has_value() && !sumcheck_verified.value()) { - return false; - } - - // Execute ZeroMorph rounds. See https://hackmd.io/dlf9xEwhTQyE3hiGbq4FsA?view for a complete description of the - // unrolled protocol. - // NOTE: temporarily disabled - facing integration issues - // auto pairing_points = ZeroMorph::verify(commitments.get_unshifted(), - // commitments.get_to_be_shifted(), - // claimed_evaluations.get_unshifted(), - // claimed_evaluations.get_shifted(), - // multivariate_challenge, - // transcript); - - // auto verified = pcs_verification_key->pairing_check(pairing_points[0], pairing_points[1]); - // return sumcheck_verified.value() && verified; - return sumcheck_verified.value(); -} - -} // namespace proof_system::honk diff --git a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.hpp b/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.hpp deleted file mode 100644 index 303cb4fca70..00000000000 --- a/barretenberg/cpp/src/barretenberg/vm/generated/Fib_verifier.hpp +++ /dev/null @@ -1,33 +0,0 @@ - - -#pragma once -#include "barretenberg/flavor/generated/Fib_flavor.hpp" -#include "barretenberg/plonk/proof_system/types/proof.hpp" -#include "barretenberg/sumcheck/sumcheck.hpp" - -namespace proof_system::honk { -class FibVerifier { - using Flavor = honk::flavor::FibFlavor; - using FF = Flavor::FF; - using Commitment = Flavor::Commitment; - using VerificationKey = Flavor::VerificationKey; - using VerifierCommitmentKey = Flavor::VerifierCommitmentKey; - using Transcript = Flavor::Transcript; - - public: - explicit FibVerifier(std::shared_ptr verifier_key = nullptr); - FibVerifier(FibVerifier&& other) noexcept; - FibVerifier(const FibVerifier& other) = delete; - - FibVerifier& operator=(const FibVerifier& other) = delete; - FibVerifier& operator=(FibVerifier&& other) noexcept; - - bool verify_proof(const plonk::proof& proof); - - std::shared_ptr key; - std::map commitments; - std::shared_ptr pcs_verification_key; - std::shared_ptr transcript; -}; - -} // namespace proof_system::honk diff --git a/barretenberg/ts/CHANGELOG.md b/barretenberg/ts/CHANGELOG.md index 1b0d688a21b..fa0bef62e34 100644 --- a/barretenberg/ts/CHANGELOG.md +++ b/barretenberg/ts/CHANGELOG.md @@ 
-1,5 +1,48 @@ # Changelog +## [0.16.7](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.6...barretenberg.js-v0.16.7) (2023-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.6](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.5...barretenberg.js-v0.16.6) (2023-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.5](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.4...barretenberg.js-v0.16.5) (2023-12-06) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.4](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.3...barretenberg.js-v0.16.4) (2023-12-05) + + +### Miscellaneous + +* **barretenberg.js:** Synchronize aztec-packages versions + +## [0.16.3](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.2...barretenberg.js-v0.16.3) (2023-12-05) + + +### Miscellaneous + +* CLI's startup time was pushing almost 2s. This gets the basic 'help' down to 0.16. ([#3529](https://github.com/AztecProtocol/aztec-packages/issues/3529)) ([396df13](https://github.com/AztecProtocol/aztec-packages/commit/396df13389cdcb8b8b0d5a92a4b3d1c2bffcb7a7)) + +## [0.16.2](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.1...barretenberg.js-v0.16.2) (2023-12-05) + + +### Miscellaneous + +* Optimise bb.js package size and sandox/cli dockerfiles to unbloat final containers. ([#3462](https://github.com/AztecProtocol/aztec-packages/issues/3462)) ([cb3db5d](https://github.com/AztecProtocol/aztec-packages/commit/cb3db5d0f1f8912f1a97258e5043eb0f69eff551)) +* Pin node version in docker base images and bump nvmrc ([#3537](https://github.com/AztecProtocol/aztec-packages/issues/3537)) ([5d3895a](https://github.com/AztecProtocol/aztec-packages/commit/5d3895aefb7812eb6bd8017baf43533959ad69b4)) + ## [0.16.1](https://github.com/AztecProtocol/aztec-packages/compare/barretenberg.js-v0.16.0...barretenberg.js-v0.16.1) (2023-11-28) diff --git a/barretenberg/ts/Dockerfile b/barretenberg/ts/Dockerfile index 1edeb3c4377..d28b5661b2d 100644 --- a/barretenberg/ts/Dockerfile +++ b/barretenberg/ts/Dockerfile @@ -1,11 +1,11 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/barretenberg-wasm-linux-clang -FROM node:18-alpine +FROM node:18.19.0-alpine COPY --from=0 /usr/src/barretenberg /usr/src/barretenberg # Create a standalone container that can run bb.js (and tests). # We'll perform the build in a new, different directory, so the original directory can become the "published" package. -WORKDIR /usr/src/barretenberg/ts +WORKDIR /usr/src/barretenberg/ts-build # Leverage layer caching. Only re-install packages if these files change. COPY .yarn .yarn COPY package.json package.json @@ -17,4 +17,4 @@ RUN yarn formatting && SKIP_CPP_BUILD=1 yarn build CMD ["yarn", "test"] # We want to create a pure package, as would be published to npm, for consuming projects. 
-RUN yarn pack && tar zxf package.tgz && rm package.tgz \ No newline at end of file +RUN yarn pack && tar zxf package.tgz && rm package.tgz && mv package ../ts \ No newline at end of file diff --git a/barretenberg/ts/package.json b/barretenberg/ts/package.json index 64b06f27e09..79ba376f8ae 100644 --- a/barretenberg/ts/package.json +++ b/barretenberg/ts/package.json @@ -1,6 +1,6 @@ { "name": "@aztec/bb.js", - "version": "0.16.1", + "version": "0.16.7", "homepage": "https://github.com/AztecProtocol/aztec-packages/tree/master/barretenberg/ts", "license": "MIT", "type": "module", @@ -20,14 +20,13 @@ "README.md" ], "scripts": { - "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs package.tgz package", - "build": "yarn clean && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser && yarn build:package", + "clean": "rm -rf ./dest .tsbuildinfo .tsbuildinfo.cjs", + "build": "yarn clean && yarn build:wasm && yarn build:esm && yarn build:cjs && yarn build:browser", "build:wasm": "./scripts/build_wasm.sh", "build:esm": "tsc -b && chmod +x ./dest/node/main.js", "build:cjs": "tsc -b tsconfig.cjs.json && ./scripts/cjs_postprocess.sh", "build:browser": "webpack", "build:bindings": "cd .. && ./scripts/bindgen.sh", - "build:package": "yarn pack && tar zxf package.tgz && rm -f package.tgz", "formatting": "prettier --check ./src && eslint --max-warnings 0 ./src", "formatting:fix": "prettier -w ./src", "test": "NODE_OPTIONS='--loader ts-node/esm' NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --no-cache --passWithNoTests", diff --git a/barretenberg/ts/src/barretenberg/index.ts b/barretenberg/ts/src/barretenberg/index.ts index 40b2ef0261e..6019b24e88f 100644 --- a/barretenberg/ts/src/barretenberg/index.ts +++ b/barretenberg/ts/src/barretenberg/index.ts @@ -41,7 +41,8 @@ export class Barretenberg extends BarretenbergApi { } } -let barretenbergSyncSingleton: Promise<BarretenbergSync>; +let barretenbergSyncSingleton: BarretenbergSync; +let barretenbergSyncSingletonPromise: Promise<BarretenbergSync>; export class BarretenbergSync extends BarretenbergApiSync { private constructor(wasm: BarretenbergWasmMain) { @@ -55,9 +56,16 @@ export class BarretenbergSync extends BarretenbergApiSync { return new BarretenbergSync(wasm); } + static initSingleton() { + if (!barretenbergSyncSingletonPromise) { + barretenbergSyncSingletonPromise = BarretenbergSync.new().then(s => (barretenbergSyncSingleton = s)); + } + return barretenbergSyncSingletonPromise; + } + static getSingleton() { if (!barretenbergSyncSingleton) { - barretenbergSyncSingleton = BarretenbergSync.new(); + throw new Error('Initialise first via initSingleton().'); } return barretenbergSyncSingleton; } @@ -66,3 +74,11 @@ export class BarretenbergSync extends BarretenbergApiSync { return this.wasm; } } + +// If we're loading this module in a test environment, just init the singleton immediately for convenience. +if (process.env.NODE_ENV === 'test') { + // Need to ignore for cjs build.
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore + await BarretenbergSync.initSingleton(); +} diff --git a/build-system/.gitrepo b/build-system/.gitrepo index 245270514ac..b1fdb9bd872 100644 --- a/build-system/.gitrepo +++ b/build-system/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/AztecProtocol/build-system branch = master - commit = c41347e5e28b61669766ae75317ccaf78e0d9750 - parent = ebdeea3f4bc721d5708b44ba1f89ba24eb0e25d5 + commit = 4e1ea3fc2cdbbfdb97d21b0920eb681ede20c333 + parent = 379b5adc259ac69b01e61b852172cdfc87cf9350 method = merge cmdver = 0.4.6 diff --git a/build-system/scripts/augment_prompt b/build-system/scripts/augment_prompt new file mode 100755 index 00000000000..e3dc524043b --- /dev/null +++ b/build-system/scripts/augment_prompt @@ -0,0 +1,2 @@ +# Used to augment the prompt when using start_interactive and zsh. +echo "b " \ No newline at end of file diff --git a/build-system/scripts/build b/build-system/scripts/build index 71826e74dbf..6906a275149 100755 --- a/build-system/scripts/build +++ b/build-system/scripts/build @@ -86,30 +86,59 @@ if [ -d $ROOT_PATH/$PROJECT_DIR/terraform ]; then popd fi -# For each dependency, pull in the latest image and give it correct tag. +# For each dependency, substitute references to the dependency in the dockerfile with the relevant built image uri. +# We have to perform a bit of probing to determine which actual image we want to use. +# When we used buildx to create a multiarch image, there will be no images with "-$ARCH" suffixes (normalise this?). +# Also we sometimes build an arm image from an x86 parent, so there won't always be an arm parent, and we fall back. for PARENT_REPO in $(query_manifest dependencies $REPOSITORY); do - PARENT_IMAGE_URI=$(calculate_image_uri $PARENT_REPO) - echo "Pulling dependency $PARENT_IMAGE_URI..." - if ! fetch_image $PARENT_IMAGE_URI; then - # This is a *bit* of a hack maybe. Some of our arm images can be built from x86 dependents. - # e.g. node projects are architecture independent. - # This may not hold true if we start introducing npm modules that are backed by native code. - # But for now, to avoid building some projects twice, we can fallback onto x86 variant. - PARENT_IMAGE_URI=$(calculate_image_uri $PARENT_REPO x86_64) - echo "Falling back onto x86 build. Pulling dependency $PARENT_IMAGE_URI..." - fetch_image $PARENT_IMAGE_URI + # We want the parent image tag without any arch suffix. + PARENT_IMAGE_TAG=$(calculate_image_tag $PARENT_REPO "") + + # Attempt to locate multiarch image. + if ! image_exists $PARENT_REPO $PARENT_IMAGE_TAG; then + # Attempt to locate our specific arch image. + PARENT_IMAGE_TAG=$(calculate_image_tag $PARENT_REPO) + if ! image_exists $PARENT_REPO $PARENT_IMAGE_TAG; then + # Finally attempt to locate x86_64 image tag, as sometimes we build arch specific images from x86_64 images. + PARENT_IMAGE_TAG=$(calculate_image_tag $PARENT_REPO x86_64) + if ! image_exists $PARENT_REPO $PARENT_IMAGE_TAG; then + echo "Failed to locate multiarch image, arch specific image, or x86_64 image. Aborting." + exit 1 + fi + fi fi - # Tag it to look like an official release as that's what we use in Dockerfiles. - TAG=$ECR_DEPLOY_URL/$PARENT_REPO - docker tag $PARENT_IMAGE_URI $TAG + + # Substitute references to the parent repo with the relevant built image uri.
+ DEPLOY_URI=$ECR_DEPLOY_URL/$PARENT_REPO + PARENT_IMAGE_URI=$ECR_URL/$PARENT_REPO:$PARENT_IMAGE_TAG + awk '{if ($1 == "FROM" && $2 == "'$DEPLOY_URI'") $2 = "'$PARENT_IMAGE_URI'"; print $0}' $DOCKERFILE > _temp && mv _temp $DOCKERFILE done COMMIT_TAG_VERSION=$(extract_tag_version $REPOSITORY false) echo "Commit tag version: $COMMIT_TAG_VERSION" -# Build the actual image and give it a commit tag. IMAGE_COMMIT_URI=$(calculate_image_uri $REPOSITORY) echo "Building image: $IMAGE_COMMIT_URI" -docker build -t $IMAGE_COMMIT_URI -f $DOCKERFILE --build-arg COMMIT_TAG=$COMMIT_TAG_VERSION --build-arg ARG_CONTENT_HASH=$CONTENT_HASH . -echo "Pushing image: $IMAGE_COMMIT_URI" -retry docker push $IMAGE_COMMIT_URI > /dev/null 2>&1 + +MULTIARCH=$(query_manifest multiarch $REPOSITORY) + +# Build the image. +if [ "$MULTIARCH" == "buildx" ]; then + # We've requested to use buildx. This will build containers for both architectures on the host machine using virtualization. + # The result is a single image tag that supports multiarch. + # This is the simplest approach for build jobs that are not too intensive. + docker buildx create --name builder --use + docker buildx inspect --bootstrap + docker buildx build -t $IMAGE_COMMIT_URI -f $DOCKERFILE --build-arg COMMIT_TAG=$COMMIT_TAG_VERSION --build-arg ARG_CONTENT_HASH=$CONTENT_HASH --platform linux/amd64,linux/arm64 . --push +else + # If multiarch is set to "host", the assumption is that we're doing multiple builds on different machine architectures + # in parallel, and that there is another job that runs afterwards to combine them into a manifest. + # In this case we need to augment the image tag with the host's architecture to ensure its uniqueness. + if [ "$MULTIARCH" == "host" ]; then + IMAGE_COMMIT_URI=$(calculate_image_uri $REPOSITORY host) + fi + + docker build -t $IMAGE_COMMIT_URI -f $DOCKERFILE --build-arg COMMIT_TAG=$COMMIT_TAG_VERSION --build-arg ARG_CONTENT_HASH=$CONTENT_HASH . + echo "Pushing image: $IMAGE_COMMIT_URI" + retry docker push $IMAGE_COMMIT_URI > /dev/null 2>&1 +fi diff --git a/build-system/scripts/build_local b/build-system/scripts/build_local index b4572cecee0..cdb9f3a60f3 100755 --- a/build-system/scripts/build_local +++ b/build-system/scripts/build_local @@ -98,7 +98,7 @@ for E in "${PROJECTS[@]}"; do echo -e "${YELLOW}Project or dependency has local modifications! Building...${RESET}" docker build ${ADDITIONAL_ARGS:-} --build-arg ARG_COMMIT_HASH=$COMMIT_HASH -f $DOCKERFILE -t $DEPLOY_IMAGE_URI . else - if [ -z "$NO_CACHE" ] && docker image ls --format "{{.Repository}}:{{.Tag}}" | grep -q -w $CACHE_IMAGE_URI; then + if [ -z "$NO_CACHE" ] && docker image ls --format "{{.Repository}}:{{.Tag}}" | grep -q -w "$CACHE_IMAGE_URI$"; then echo -e "${GREEN}Image exists locally. Tagging as $DEPLOY_IMAGE_URI${RESET}" docker tag $CACHE_IMAGE_URI $DEPLOY_IMAGE_URI else @@ -114,6 +114,9 @@ for E in "${PROJECTS[@]}"; do # Retag for aztecprotocol dockerhub.
docker tag $DEPLOY_IMAGE_URI aztecprotocol/$REPO:latest + echo -e "${BOLD}Tagged${RESET}: aztecprotocol/$REPO:latest" + echo -e "${BOLD}SHA256${RESET}: $(docker inspect --format='{{.Id}}' $DEPLOY_IMAGE_URI)" + if [ "$PROJECT_DIR_NAME" = "$TARGET_PROJECT" ]; then if [ -n "$LAUNCH" ]; then docker run -ti --rm aztecprotocol/$REPO:latest diff --git a/build-system/scripts/calculate_content_hash b/build-system/scripts/calculate_content_hash index 630595aa371..28ae8ff2e62 100755 --- a/build-system/scripts/calculate_content_hash +++ b/build-system/scripts/calculate_content_hash @@ -1,21 +1,8 @@ #!/bin/bash - [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace -set -eu - -REPOSITORY=$1 -COMMIT_HASH=${2:-${COMMIT_HASH:-$(git rev-parse HEAD)}} - -# Get list of rebuild patterns, concat them with regex 'or' (|), and double escape \ for awk -v. -AWK_PATTERN=$(query_manifest rebuildPatterns $REPOSITORY | tr '\n' '|' | sed 's/\\/\\\\/g') -# Remove the trailing '|'. -AWK_PATTERN=${AWK_PATTERN%|} - -cd "$(git rev-parse --show-toplevel)" +set -euo pipefail -# an example line is +# An example line is: # 100644 da9ae2e020ea7fe3505488bbafb39adc7191559b 0 yarn-project/world-state/tsconfig.json -# this format is beneficial as it grabs the hashes from git efficiently -# we will next filter by our rebuild patterns -# then we pipe the hash portion of each file to git hash-object to produce our content hash -git ls-tree -r $COMMIT_HASH | awk -v pattern="($AWK_PATTERN)" '$4 ~ pattern {print $3}' | git hash-object --stdin \ No newline at end of file +# Extract the hash portion of each file and pipe it to git hash-object to produce our content hash. +calculate_rebuild_files "$@" | awk '{print $3}' | git hash-object --stdin \ No newline at end of file diff --git a/build-system/scripts/calculate_image_tag b/build-system/scripts/calculate_image_tag index 2a7786ecdc1..c273648287c 100755 --- a/build-system/scripts/calculate_image_tag +++ b/build-system/scripts/calculate_image_tag @@ -1,9 +1,23 @@ #!/bin/bash +# Return a repository's build cache image tag based on content hash. +# If the second argument is set: +# It's used to suffix the tag with the given unique arch descriptor. +# Unless it's "host", in which case use the host machine's arch as the arch descriptor. [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu REPOSITORY=$1 -ARCH=${2:-$(uname -m)} -[ "$ARCH" == "aarch64" ] && ARCH=arm64 +ARCH=${2:-} + CONTENT_HASH=$(calculate_content_hash $REPOSITORY) + +if [ -z "$ARCH" ]; then + echo "cache-$CONTENT_HASH" +else + if [ "$ARCH" == "host" ]; then + ARCH=$(uname -m) + fi + # Normalise.
+ [ "$ARCH" == "aarch64" ] && ARCH=arm64 + echo "cache-$CONTENT_HASH-$ARCH" +fi diff --git a/build-system/scripts/calculate_image_uri b/build-system/scripts/calculate_image_uri index fe60a9c05ab..8efd7ab4cdc 100755 --- a/build-system/scripts/calculate_image_uri +++ b/build-system/scripts/calculate_image_uri @@ -3,7 +3,6 @@ set -eu REPOSITORY=$1 -ARCH=${2:-$(uname -m)} -[ "$ARCH" == "aarch64" ] && ARCH=arm64 -CONTENT_HASH=$(calculate_content_hash $REPOSITORY) -echo "$ECR_URL/$REPOSITORY:cache-$CONTENT_HASH-$ARCH" + +TAG=$(calculate_image_tag "$@") +echo "$ECR_URL/$REPOSITORY:$TAG" diff --git a/build-system/scripts/calculate_rebuild_files b/build-system/scripts/calculate_rebuild_files new file mode 100755 index 00000000000..9be1afd4912 --- /dev/null +++ b/build-system/scripts/calculate_rebuild_files @@ -0,0 +1,20 @@ +#!/bin/bash + +[ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace +set -eu + +REPOSITORY=$1 +COMMIT_HASH=${2:-${COMMIT_HASH:-$(git rev-parse HEAD)}} + +# Get list of rebuild patterns, concat them with regex 'or' (|), and double escape \ for awk -v. +AWK_PATTERN=$(query_manifest rebuildPatterns $REPOSITORY | tr '\n' '|' | sed 's/\\/\\\\/g') +# Remove the trailing '|'. +AWK_PATTERN=${AWK_PATTERN%|} + +cd "$(git rev-parse --show-toplevel)" + +# An example line is: +# 100644 da9ae2e020ea7fe3505488bbafb39adc7191559b 0 yarn-project/world-state/tsconfig.json +# This format is beneficial as it grabs the hashes from git efficiently. +# We then filter by our rebuild patterns. +git ls-tree -r $COMMIT_HASH | awk -v pattern="($AWK_PATTERN)" '$4 ~ pattern {print $0}' \ No newline at end of file diff --git a/build-system/scripts/cond_spot_run_build b/build-system/scripts/cond_spot_run_build index 6e970387cce..3333e5dec7c 100755 --- a/build-system/scripts/cond_spot_run_build +++ b/build-system/scripts/cond_spot_run_build @@ -6,4 +6,4 @@ REPOSITORY=$1 CPUS=$2 ARCH=${3:-x86_64} -cond_spot_run_script $REPOSITORY $CPUS $ARCH build $REPOSITORY $ARCH | add_timestamps +cond_spot_run_script $REPOSITORY $CPUS $ARCH build $REPOSITORY | add_timestamps diff --git a/build-system/scripts/cond_spot_run_script b/build-system/scripts/cond_spot_run_script index 89d41c9af0e..4b1832127ac 100755 --- a/build-system/scripts/cond_spot_run_script +++ b/build-system/scripts/cond_spot_run_script @@ -19,8 +19,7 @@ CPUS=$2 ARCH=$3 shift 3 -# If the CPUS have a specific architecture assigned, we need to use that to build the success tag. -BASE_TAG=$(calculate_image_tag $REPOSITORY $ARCH) +BASE_TAG=$(calculate_image_tag $REPOSITORY) SUCCESS_TAG=$BASE_TAG if [ -n "${TAG_POSTFIX:-}" ]; then diff --git a/build-system/scripts/deploy_dockerhub b/build-system/scripts/deploy_dockerhub index ac99bbe4f1a..606e0952b5d 100755 --- a/build-system/scripts/deploy_dockerhub +++ b/build-system/scripts/deploy_dockerhub @@ -3,66 +3,48 @@ set -eu REPOSITORY=$1 -# TODO: Why even provide this? We can just figure it out by probing. -ARCH_LIST=${2:-} -# The tag to use for release images. Can be turned into an argument if needed, for now we only release 'latest'. -DIST_TAG="latest" - -function docker_or_dryrun { +function cmd_or_dryrun { if [ "$DRY_DEPLOY" -eq 1 ]; then - echo DRY RUN: docker $@ + echo DRY RUN: $@ else - retry docker $@ + retry $@ fi } echo "Repo: $REPOSITORY" -echo "Arch List: $ARCH_LIST" - -VERSION_TAG=$(extract_tag_version $REPOSITORY false) -# if no version tag, check if we're on `master` branch -if [[ -z "$VERSION_TAG" ]]; then - if [[ "$BRANCH" != "master" ]]; then - echo "No version tag found. 
Exiting" >&2 - exit 1 - fi - # if we're on master, use the DEPLOY_TAG as the version tag - VERSION_TAG=$DEPLOY_TAG +# Set tag to the commit tag (a version number) if we have one, otherwise branch name normalised to a tag format. +if [ -n "$COMMIT_TAG" ]; then + TAG=$(extract_tag_version $REPOSITORY true) +else + TAG=$(echo "$BRANCH" | sed 's/[^a-zA-Z0-9_.-]/_/g') fi -MANIFEST_DEPLOY_URI=$DOCKERHUB_ACCOUNT/$REPOSITORY:$VERSION_TAG -MANIFEST_DIST_URI=$DOCKERHUB_ACCOUNT/$REPOSITORY:$DIST_TAG - # Login to dockerhub and ecr dockerhub_login ecr_login -IFS=',' -for ARCH in $ARCH_LIST; do - IMAGE_COMMIT_URI=$(calculate_image_uri $REPOSITORY $ARCH) - retry docker pull $IMAGE_COMMIT_URI - - # Retag and push image. - IMAGE_DEPLOY_URI=$DOCKERHUB_ACCOUNT/$REPOSITORY:$VERSION_TAG-$ARCH - docker tag $IMAGE_COMMIT_URI $IMAGE_DEPLOY_URI - docker_or_dryrun push $IMAGE_DEPLOY_URI - - echo "Adding image $IMAGE_DEPLOY_URI to manifest list $MANIFEST_DEPLOY_URI..." - docker_or_dryrun manifest create $MANIFEST_DEPLOY_URI --amend $IMAGE_DEPLOY_URI - - # Add latest manifest if we're making a release. - if [[ "$VERSION_TAG" != $DEPLOY_TAG ]]; then - echo "Adding image $IMAGE_DEPLOY_URI to manifest list $MANIFEST_DIST_URI" - docker_or_dryrun manifest create $MANIFEST_DIST_URI --amend $IMAGE_DEPLOY_URI - fi -done - -docker_or_dryrun manifest push --purge $MANIFEST_DEPLOY_URI +# Install skopeo, and immediately hack it to newer version. +sudo apt install -y skopeo +retry wget https://github.com/lework/skopeo-binary/releases/download/v1.13.3/skopeo-linux-amd64 +chmod +x ./skopeo-linux-amd64 && sudo mv ./skopeo-linux-amd64 /usr/bin/skopeo + +# Publish with version or branch tag. +SRC_URI=$(calculate_image_uri $REPOSITORY) +DST_URI=$DOCKERHUB_ACCOUNT/$REPOSITORY:$TAG +cmd_or_dryrun skopeo copy --all docker://$SRC_URI docker://$DST_URI + +# If this was a versioned release, publish as latest. +if [ -n "$COMMIT_TAG" ]; then + SRC_URI=$DST_URI + DST_URI=$DOCKERHUB_ACCOUNT/$REPOSITORY:latest + cmd_or_dryrun skopeo copy --all docker://$SRC_URI docker://$DST_URI +fi -# Retag version as latest if we're making a release. -if [[ "$VERSION_TAG" != "$DEPLOY_TAG" ]]; then - echo "Tagging $MANIFEST_DEPLOY_URI as $DIST_TAG..." - docker_or_dryrun manifest push --purge $MANIFEST_DIST_URI +# If this was a deployment, publish with the deploy tag. +if [ -n "$DEPLOY_TAG" ]; then + SRC_URI=$DST_URI + DST_URI=$DOCKERHUB_ACCOUNT/$REPOSITORY:$DEPLOY_TAG + cmd_or_dryrun skopeo copy --all docker://$SRC_URI docker://$DST_URI fi diff --git a/build-system/scripts/extract_repo b/build-system/scripts/extract_repo index ecae3c84152..c2579b5a46f 100755 --- a/build-system/scripts/extract_repo +++ b/build-system/scripts/extract_repo @@ -16,7 +16,7 @@ TEMP_CONTAINER=$(docker create $IMAGE_COMMIT_URI) echo "Extracting $EXTRACT_FROM from $REPOSITORY to $EXTRACT_TO..." 
mkdir -p $EXTRACT_TO docker cp $TEMP_CONTAINER:$EXTRACT_FROM $EXTRACT_TO -docker rm -v $TEMP_CONTAINER > /dev/null +docker rm -v $TEMP_CONTAINER >/dev/null echo "Extracted contents:" ls -al $EXTRACT_TO diff --git a/build-system/scripts/query_manifest b/build-system/scripts/query_manifest index 0358fbf89cb..1fee0ee5759 100755 --- a/build-system/scripts/query_manifest +++ b/build-system/scripts/query_manifest @@ -4,7 +4,7 @@ set -eu CMD=$1 REPO=$2 -ROOT_PATH=${ROOT_PATH:-$PWD} +ROOT_PATH=${ROOT_PATH:-$(git rev-parse --show-toplevel)} MANIFEST=$ROOT_PATH/build_manifest.yml if [ $(yq "has(\"$REPO\")" $MANIFEST) == "false" ]; then @@ -126,4 +126,7 @@ case "$CMD" in if [ "$(git ls-tree HEAD $DIR | awk '{print $2}')" = "commit" ]; then git ls-tree HEAD $DIR | awk '{print $4}' fi + ;; + multiarch) + yq -r ".\"$REPO\".multiarch // false" $MANIFEST esac diff --git a/build-system/scripts/setup_env b/build-system/scripts/setup_env index a63b2a13428..8e4058a1c53 100755 --- a/build-system/scripts/setup_env +++ b/build-system/scripts/setup_env @@ -16,9 +16,13 @@ BRANCH=${5:-} PULL_REQUEST=${6:-} BASH_ENV=${BASH_ENV:-} +ROOT_PATH=$(git rev-parse --show-toplevel) BUILD_SYSTEM_PATH=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd) -PROJECT=$(cat PROJECT) +PROJECT=$(cat $ROOT_PATH/PROJECT) COMMIT_MESSAGE=$(git log -n 1 --pretty=format:"%s" $COMMIT_HASH) +PATH=$PATH:$BUILD_SYSTEM_PATH/scripts + +export BRANCH echo "COMMIT_HASH=$COMMIT_HASH" echo "COMMIT_TAG=$COMMIT_TAG" @@ -33,31 +37,35 @@ if [[ "$COMMIT_MESSAGE" == *"[ci dry-deploy]"* ]]; then DRY_DEPLOY=1 fi -if [ -n "${COMMIT_TAG:-}" ]; then - # Extract the deploy env from the commit tag, if it has one, e.g. testnet. - # If we have one, we look something like v2.1.123-testnet.0. This is a "non production" release. - if [[ "$COMMIT_TAG" == *"-"* ]]; then - # Strips the trailing '.XX' from the end of the commit tag - TEMP=${COMMIT_TAG%.*} - # Strips the 'vX.Y.ZZZ-' from the front of the commit tag, leaving the e.g. 'testnet' - DEPLOY_ENV=${TEMP##*-} +if should_deploy; then + if [ -n "${COMMIT_TAG:-}" ]; then + # Extract the deploy env from the commit tag, if it has one, e.g. testnet. + # If we have one, we look something like v2.1.123-testnet.0. This is a "non production" release. + if [[ "$COMMIT_TAG" == *"-"* ]]; then + # Strips the trailing '.XX' from the end of the commit tag + TEMP=${COMMIT_TAG%.*} + # Strips the 'vX.Y.ZZZ-' from the front of the commit tag, leaving the e.g. 'testnet' + DEPLOY_ENV=${TEMP##*-} + else + DEPLOY_ENV=prod + fi else - DEPLOY_ENV=prod + # If we're on master, this is our devnet. + DEPLOY_ENV="dev" fi -else - # We have no commit tag, this is our devnet. - DEPLOY_ENV="dev" fi # - The deploy tag (used in api paths, subdomains), is a concatenation of the project name and deploy environment, # e.g. 
aztec-dev, aztec-testnet, or aztec-prod -DEPLOY_TAG=$PROJECT-$DEPLOY_ENV +if [ -n "${DEPLOY_ENV:-}" ]; then + DEPLOY_TAG=$PROJECT-$DEPLOY_ENV +fi if [ -z "$BASH_ENV" ]; then BASH_ENV=$(mktemp) fi -echo export ROOT_PATH=$PWD >> $BASH_ENV +echo export ROOT_PATH=$ROOT_PATH >> $BASH_ENV echo export BUILD_SYSTEM_PATH=$BUILD_SYSTEM_PATH >> $BASH_ENV echo export DOCKER_BUILDKIT=${DOCKER_BUILDKIT:-1} >> $BASH_ENV echo export BUILDKIT_PROGRESS=plain >> $BASH_ENV @@ -77,13 +85,11 @@ echo export COMMIT_TAG=$COMMIT_TAG >> $BASH_ENV echo "export COMMIT_MESSAGE='${COMMIT_MESSAGE//\'/\'\\\'\'}'" >> $BASH_ENV echo export JOB_NAME=$JOB_NAME >> $BASH_ENV echo export GIT_REPOSITORY_URL=$GIT_REPOSITORY_URL >> $BASH_ENV -echo export DEPLOY_ENV=$DEPLOY_ENV >> $BASH_ENV -echo export DEPLOY_TAG=$DEPLOY_TAG >> $BASH_ENV +echo export DEPLOY_ENV=${DEPLOY_ENV:-} >> $BASH_ENV +echo export DEPLOY_TAG=${DEPLOY_TAG:-} >> $BASH_ENV echo export BRANCH=$BRANCH >> $BASH_ENV echo export PULL_REQUEST=$PULL_REQUEST >> $BASH_ENV echo export DRY_DEPLOY=${DRY_DEPLOY:-0} >> $BASH_ENV -echo export FORK_API_KEY=${FORK_API_KEY:-} >> $BASH_ENV -echo export CONTRACT_PUBLISHER_PRIVATE_KEY=${CONTRACT_PUBLISHER_PRIVATE_KEY:-} >> $BASH_ENV # We want very strict failures on any failing command, undefined variable, or commands that pipe to other commands. echo set -euo pipefail >> $BASH_ENV diff --git a/build-system/scripts/should_deploy b/build-system/scripts/should_deploy index b5f198af06e..53f295b191f 100755 --- a/build-system/scripts/should_deploy +++ b/build-system/scripts/should_deploy @@ -1,14 +1,9 @@ #!/bin/bash # Returns success if we are expected to do a deployment. -# This is if we have a commit tag (release) or if we're on `master` branch (devnet deployment). - +# Right now, that's only if we're on master. set -eu -# This is when we only want to deploy on master, not release with new COMMIT_TAG. -# Specifically for deploying devnet. -RELEASE=${1:-1} - -if [ -n "$COMMIT_TAG" ] && [ "$RELEASE" != "0" ] || [ "$BRANCH" = "master" ]; then +if [ "$BRANCH" == "master" ]; then exit 0 else exit 1 diff --git a/build-system/scripts/should_release b/build-system/scripts/should_release new file mode 100755 index 00000000000..5473fc6da76 --- /dev/null +++ b/build-system/scripts/should_release @@ -0,0 +1,9 @@ +#!/bin/bash +# Returns true if we are expected to proceed with a release job. +# Specifically, if we have a commit tag, are on master, or are being forced to release. +# This script should be used at the start of all release steps to early out PR runs. +# Later release steps may early out regardless, e.g. npm releases only run for tagged commits. +# Dockerhub publishing will publish for master, or PRs run with force-release, but only tag latest on tagged commits. +set -eu + +[[ -n "$COMMIT_TAG" || "$BRANCH" == "master" || "$COMMIT_MESSAGE" == *"[ci force-release]"* ]] \ No newline at end of file diff --git a/build-system/start_interactive b/build-system/start_interactive new file mode 100755 index 00000000000..2ed1f2df13a --- /dev/null +++ b/build-system/start_interactive @@ -0,0 +1,6 @@ +#!/bin/bash +# Starts an interactive shell with the build system initialised. +# Good for playing around with the build system on development machines.
+ +source $(dirname "$0")/scripts/setup_env '' '' mainframe_$USER > /dev/null +PROMPT_LEAN_LEFT=augment_prompt $SHELL \ No newline at end of file diff --git a/build_manifest.yml b/build_manifest.yml index 73c33d472de..588c3c70a93 100644 --- a/build_manifest.yml +++ b/build_manifest.yml @@ -1,10 +1,41 @@ noir: - # Add less aggressive rebuildPatterns. buildDir: noir + rebuildPatterns: + - ^noir/Dockerfile + - ^noir/acvm-repo + - ^noir/compiler + - ^noir/aztec_macros + - ^noir/noir_stdlib + - ^noir/tooling/backend_interface + - ^noir/tooling/bb_abstraction_leaks + - ^noir/tooling/debugger + - ^noir/tooling/lsp + - ^noir/tooling/nargo + - ^noir/tooling/nargo_cli + - ^noir/tooling/nargo_toml + - ^noir/tooling/nargo_fmt + - ^noir/tooling/noirc_abi + multiarch: host noir-packages: buildDir: noir dockerfile: Dockerfile.packages + rebuildPatterns: + - ^noir/Dockerfile.packages + - ^noir/.yarn + - ^noir/.yarnrc.yml + - ^noir/package.json + - ^noir/yarn.lock + - ^noir/acvm-repo + - ^noir/compiler + - ^noir/aztec_macros + - ^noir/noir_stdlib + - ^noir/tooling/noir_codegen + - ^noir/tooling/noir_js + - ^noir/tooling/noir_js_backend_barretenberg + - ^noir/tooling/noir_js_types + - ^noir/tooling/noirc_abi + - ^noir/tooling/noirc_abi_wasm noir-acir-tests: buildDir: noir/test_programs @@ -92,27 +123,38 @@ yarn-project: buildDir: yarn-project rebuildPatterns: - ^yarn-project/.*\.(ts|tsx|js|cjs|mjs|json|html|md|sh|nr|toml)$ - - ^yarn-project/Dockerfile + - ^yarn-project/Dockerfile$ dependencies: - yarn-project-base +yarn-project-prod: + buildDir: yarn-project + dockerfile: Dockerfile.prod + rebuildPatterns: + - ^yarn-project/Dockerfile.prod + dependencies: + - yarn-project + multiarch: buildx + aztec-sandbox: buildDir: yarn-project projectDir: yarn-project/aztec-sandbox dependencies: - - yarn-project + - yarn-project-prod + multiarch: buildx aztec-faucet: buildDir: yarn-project projectDir: yarn-project/aztec-faucet dependencies: - - yarn-project + - yarn-project-prod cli: buildDir: yarn-project projectDir: yarn-project/cli dependencies: - - yarn-project + - yarn-project-prod + multiarch: buildx boxes-blank-react: buildDir: yarn-project diff --git a/circuits/cpp/src/aztec3/circuits/abis/historic_block_data.hpp b/circuits/cpp/src/aztec3/circuits/abis/block_header.hpp similarity index 87% rename from circuits/cpp/src/aztec3/circuits/abis/historic_block_data.hpp rename to circuits/cpp/src/aztec3/circuits/abis/block_header.hpp index 7d07d16fe0a..c1e37ae856d 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/historic_block_data.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/block_header.hpp @@ -26,7 +26,7 @@ template struct BlockHeader { fr nullifier_tree_root = 0; fr contract_tree_root = 0; fr l1_to_l2_messages_tree_root = 0; - fr blocks_tree_root = 0; + fr archive_root = 0; fr private_kernel_vk_tree_root = 0; // TODO: future enhancement // Public data @@ -38,7 +38,7 @@ template struct BlockHeader { nullifier_tree_root, contract_tree_root, l1_to_l2_messages_tree_root, - blocks_tree_root, + archive_root, private_kernel_vk_tree_root, public_data_tree_root, global_variables_hash); @@ -47,8 +47,7 @@ template struct BlockHeader { { return note_hash_tree_root == other.note_hash_tree_root && nullifier_tree_root == other.nullifier_tree_root && contract_tree_root == other.contract_tree_root && - l1_to_l2_messages_tree_root == other.l1_to_l2_messages_tree_root && - blocks_tree_root == other.blocks_tree_root && + l1_to_l2_messages_tree_root == other.l1_to_l2_messages_tree_root && archive_root == other.archive_root && 
private_kernel_vk_tree_root == other.private_kernel_vk_tree_root && public_data_tree_root == other.public_data_tree_root && global_variables_hash == other.global_variables_hash; @@ -62,7 +61,7 @@ template struct BlockHeader { nullifier_tree_root.assert_is_zero(); contract_tree_root.assert_is_zero(); l1_to_l2_messages_tree_root.assert_is_zero(); - blocks_tree_root.assert_is_zero(); + archive_root.assert_is_zero(); private_kernel_vk_tree_root.assert_is_zero(); public_data_tree_root.assert_is_zero(); global_variables_hash.assert_is_zero(); @@ -77,7 +76,7 @@ template struct BlockHeader { BlockHeader> data = { to_ct(note_hash_tree_root), to_ct(nullifier_tree_root), to_ct(contract_tree_root), - to_ct(l1_to_l2_messages_tree_root), to_ct(blocks_tree_root), to_ct(private_kernel_vk_tree_root), + to_ct(l1_to_l2_messages_tree_root), to_ct(archive_root), to_ct(private_kernel_vk_tree_root), to_ct(public_data_tree_root), to_ct(global_variables_hash), }; @@ -91,7 +90,7 @@ template struct BlockHeader { BlockHeader data = { to_nt(note_hash_tree_root), to_nt(nullifier_tree_root), to_nt(contract_tree_root), - to_nt(l1_to_l2_messages_tree_root), to_nt(blocks_tree_root), to_nt(private_kernel_vk_tree_root), + to_nt(l1_to_l2_messages_tree_root), to_nt(archive_root), to_nt(private_kernel_vk_tree_root), to_nt(public_data_tree_root), to_nt(global_variables_hash), }; @@ -106,7 +105,7 @@ template struct BlockHeader { nullifier_tree_root.set_public(); contract_tree_root.set_public(); l1_to_l2_messages_tree_root.set_public(); - blocks_tree_root.set_public(); + archive_root.set_public(); private_kernel_vk_tree_root.set_public(); public_data_tree_root.set_public(); global_variables_hash.set_public(); @@ -118,8 +117,8 @@ template struct BlockHeader { nullifier_tree_root, contract_tree_root, l1_to_l2_messages_tree_root, - blocks_tree_root, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as - // it is not present in noir, + archive_root, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as + // it is not present in noir, public_data_tree_root, global_variables_hash }; } diff --git a/circuits/cpp/src/aztec3/circuits/abis/packers.hpp b/circuits/cpp/src/aztec3/circuits/abis/packers.hpp index 2ee93840908..cc73e4e9a15 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/packers.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/packers.hpp @@ -59,7 +59,7 @@ struct ConstantsPacker { NOTE_HASH_SUBTREE_HEIGHT, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, L1_TO_L2_MSG_SUBTREE_HEIGHT), NVP(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp index 380b1a2eec4..c50cb0e5572 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/base/base_rollup_inputs.hpp @@ -21,7 +21,7 @@ template struct BaseRollupInputs { AppendOnlyTreeSnapshot start_nullifier_tree_snapshot{}; AppendOnlyTreeSnapshot start_contract_tree_snapshot{}; fr start_public_data_tree_root{}; - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; std::array, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP> low_nullifier_leaf_preimages{}; std::array, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP> @@ -37,8 +37,7 @@ template struct BaseRollupInputs { std::array, 
MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP> new_public_data_reads_sibling_paths{}; - std::array, KERNELS_PER_BASE_ROLLUP> - blocks_tree_root_membership_witnesses{}; + std::array, KERNELS_PER_BASE_ROLLUP> archive_root_membership_witnesses{}; ConstantRollupData constants{}; @@ -48,7 +47,7 @@ template struct BaseRollupInputs { start_nullifier_tree_snapshot, start_contract_tree_snapshot, start_public_data_tree_root, - start_blocks_tree_snapshot, + start_archive_snapshot, low_nullifier_leaf_preimages, low_nullifier_membership_witness, new_commitments_subtree_sibling_path, @@ -56,7 +55,7 @@ template struct BaseRollupInputs { new_contracts_subtree_sibling_path, new_public_data_update_requests_sibling_paths, new_public_data_reads_sibling_paths, - blocks_tree_root_membership_witnesses, + archive_root_membership_witnesses, constants); boolean operator==(BaseRollupInputs const& other) const diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp index a1cb37b6a66..e77d45a492f 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/constant_rollup_data.hpp @@ -11,7 +11,7 @@ template struct ConstantRollupData { using fr = typename NCT::fr; // The very latest roots as at the very beginning of the entire rollup: - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; // Some members of this struct tbd: fr private_kernel_vk_tree_root = 0; @@ -21,7 +21,7 @@ template struct ConstantRollupData { GlobalVariables global_variables{}; - MSGPACK_FIELDS(start_blocks_tree_snapshot, + MSGPACK_FIELDS(start_archive_snapshot, private_kernel_vk_tree_root, public_kernel_vk_tree_root, base_rollup_vk_hash, diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp index c765c9d09ad..f57aa95da7a 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_inputs.hpp @@ -25,16 +25,16 @@ template struct RootRollupInputs { AppendOnlyTreeSnapshot start_l1_to_l2_messages_tree_snapshot{}; // inputs required to add the block hash - AppendOnlyTreeSnapshot start_blocks_tree_snapshot{}; - std::array new_blocks_tree_sibling_path{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; + std::array new_archive_sibling_path{}; // For serialization, update with new fields MSGPACK_FIELDS(previous_rollup_data, new_l1_to_l2_messages, new_l1_to_l2_messages_tree_root_sibling_path, start_l1_to_l2_messages_tree_snapshot, - start_blocks_tree_snapshot, - new_blocks_tree_sibling_path); + start_archive_snapshot, + new_archive_sibling_path); bool operator==(RootRollupInputs const&) const = default; }; diff --git a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp index 74aff1ba5e8..4392632b69b 100644 --- a/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp +++ b/circuits/cpp/src/aztec3/circuits/abis/rollup/root/root_rollup_public_inputs.hpp @@ -43,8 +43,8 @@ template struct RootRollupPublicInputs { AppendOnlyTreeSnapshot start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot{}; AppendOnlyTreeSnapshot end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot{}; - AppendOnlyTreeSnapshot 
start_blocks_tree_snapshot{}; - AppendOnlyTreeSnapshot end_blocks_tree_snapshot{}; + AppendOnlyTreeSnapshot start_archive_snapshot{}; + AppendOnlyTreeSnapshot end_archive_snapshot{}; std::array calldata_hash{}; std::array l1_to_l2_messages_hash{}; @@ -68,8 +68,8 @@ template struct RootRollupPublicInputs { end_l1_to_l2_messages_tree_snapshot, start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot, end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot, - start_blocks_tree_snapshot, - end_blocks_tree_snapshot, + start_archive_snapshot, + end_archive_snapshot, calldata_hash, l1_to_l2_messages_hash); @@ -88,7 +88,7 @@ template struct RootRollupPublicInputs { write(buf, start_public_data_tree_root); write(buf, start_l1_to_l2_messages_tree_snapshot); write(buf, start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot); - write(buf, start_blocks_tree_snapshot); + write(buf, start_archive_snapshot); write(buf, end_note_hash_tree_snapshot); write(buf, end_nullifier_tree_snapshot); write(buf, end_contract_tree_snapshot); @@ -97,7 +97,7 @@ template struct RootRollupPublicInputs { write(buf, end_public_data_tree_root); write(buf, end_l1_to_l2_messages_tree_snapshot); write(buf, end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot); - write(buf, end_blocks_tree_snapshot); + write(buf, end_archive_snapshot); // Stitching calldata hash together auto high_buffer = calldata_hash[0].to_buffer(); diff --git a/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp b/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp index 0e9c5394aa8..66614035e65 100644 --- a/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp +++ b/circuits/cpp/src/aztec3/circuits/kernel/public/.test.cpp @@ -333,7 +333,7 @@ PublicKernelInputs get_kernel_inputs_with_previous_kernel(NT::boolean privat .nullifier_tree_root = ++seed, .contract_tree_root = ++seed, .l1_to_l2_messages_tree_root = ++seed, - .blocks_tree_root = ++seed, + .archive_root = ++seed, .private_kernel_vk_tree_root = ++seed, .public_data_tree_root = ++seed, .global_variables_hash = ++seed, diff --git a/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp b/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp index 652e6b0ca52..1a5952cc4c4 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/base/.test.cpp @@ -628,7 +628,7 @@ TEST_F(base_rollup_tests, native_calldata_hash) // run_cbind(inputs, outputs); } -TEST_F(base_rollup_tests, native_compute_membership_blocks_tree_negative) +TEST_F(base_rollup_tests, native_compute_membership_archive_negative) { // WRITE a negative test that will fail the inclusion proof @@ -639,15 +639,15 @@ TEST_F(base_rollup_tests, native_compute_membership_blocks_tree_negative) BaseRollupInputs inputs = base_rollup_inputs_from_kernels(kernel_data); MemoryStore blocks_store; - auto blocks_tree = MerkleTree(blocks_store, BLOCKS_TREE_HEIGHT); + auto archive = MerkleTree(blocks_store, ARCHIVE_HEIGHT); // Create an INCORRECT sibling path for the note hash tree root in the historical tree roots. 
- auto hash_path = blocks_tree.get_sibling_path(0); - std::array sibling_path{}; - for (size_t i = 0; i < BLOCKS_TREE_HEIGHT; ++i) { + auto hash_path = archive.get_sibling_path(0); + std::array sibling_path{}; + for (size_t i = 0; i < ARCHIVE_HEIGHT; ++i) { sibling_path[i] = hash_path[i] + 1; } - inputs.blocks_tree_root_membership_witnesses[0] = { + inputs.archive_root_membership_witnesses[0] = { .leaf_index = 0, .sibling_path = sibling_path, }; diff --git a/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp b/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp index 1b2fa67ea7d..d6a7cb4af0e 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/base/native_base_rollup_circuit.cpp @@ -132,11 +132,11 @@ NT::fr calculate_commitments_subtree(DummyBuilder& builder, BaseRollupInputs con * @param constantBaseRollupData * @param baseRollupInputs */ -void perform_blocks_tree_membership_checks(DummyBuilder& builder, BaseRollupInputs const& baseRollupInputs) +void perform_archive_membership_checks(DummyBuilder& builder, BaseRollupInputs const& baseRollupInputs) { // For each of the historical_note_hash_tree_membership_checks, we need to do an inclusion proof // against the historical root provided in the rollup constants - auto historical_root = baseRollupInputs.constants.start_blocks_tree_snapshot.root; + auto historical_root = baseRollupInputs.constants.start_archive_snapshot.root; for (size_t i = 0; i < 2; i++) { // Rebuild the block hash @@ -155,8 +155,8 @@ void perform_blocks_tree_membership_checks(DummyBuilder& builder, BaseRollupInpu l1_to_l2_messages_tree_root, public_data_tree_root); - abis::MembershipWitness const historical_root_witness = - baseRollupInputs.blocks_tree_root_membership_witnesses[i]; + abis::MembershipWitness const historical_root_witness = + baseRollupInputs.archive_root_membership_witnesses[i]; check_membership(builder, previous_block_hash, @@ -524,7 +524,7 @@ BaseOrMergeRollupPublicInputs base_rollup_circuit(DummyBuilder& builder, BaseRol components::compute_kernels_calldata_hash(baseRollupInputs.kernel_data); // Perform membership checks that the notes provided exist within the historical trees data - perform_blocks_tree_membership_checks(builder, baseRollupInputs); + perform_archive_membership_checks(builder, baseRollupInputs); AggregationObject const aggregation_object = aggregate_proofs(baseRollupInputs); diff --git a/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp b/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp index ed6f6ca0c25..50f85996946 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/root/.test.cpp @@ -176,8 +176,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) // Create initial nullifier tree with 32 initial nullifiers auto nullifier_tree = get_initial_nullifier_tree_empty(); - MemoryStore blocks_tree_store; - MerkleTree blocks_tree(blocks_tree_store, BLOCKS_TREE_HEIGHT); + MemoryStore archive_store; + MerkleTree archive(archive_store, ARCHIVE_HEIGHT); std::array kernels = { get_empty_kernel(), get_empty_kernel(), get_empty_kernel(), get_empty_kernel() @@ -192,9 +192,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) contract_tree.root(), l1_to_l2_messages_tree.root(), public_data_tree.root()); - blocks_tree.update_element(0, start_block_hash); - AppendOnlyTreeSnapshot start_blocks_tree_snapshot = { .root = 
blocks_tree.root(), - .next_available_leaf_index = 1 }; + archive.update_element(0, start_block_hash); + AppendOnlyTreeSnapshot start_archive_snapshot = { .root = archive.root(), .next_available_leaf_index = 1 }; // Create commitments for (size_t kernel_j = 0; kernel_j < 4; kernel_j++) { @@ -243,9 +242,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) contract_tree.root(), l1_to_l2_messages_tree.root(), public_data_tree.root()); - blocks_tree.update_element(1, end_block_hash); - AppendOnlyTreeSnapshot end_blocks_tree_snapshot = { .root = blocks_tree.root(), - .next_available_leaf_index = 2 }; + archive.update_element(1, end_block_hash); + AppendOnlyTreeSnapshot end_archive_snapshot = { .root = archive.root(), .next_available_leaf_index = 2 }; // Compute the end snapshot AppendOnlyTreeSnapshot const end_l1_to_l2_messages_tree_snapshot = { .root = l1_to_l2_messages_tree.root(), @@ -292,8 +290,8 @@ TEST_F(root_rollup_tests, native_root_missing_nullifier_logic) rootRollupInputs.previous_rollup_data[1].base_or_merge_rollup_public_inputs.end_contract_tree_snapshot); ASSERT_EQ(outputs.end_l1_to_l2_messages_tree_snapshot, end_l1_to_l2_messages_tree_snapshot); - ASSERT_EQ(outputs.start_blocks_tree_snapshot, start_blocks_tree_snapshot); - ASSERT_EQ(outputs.end_blocks_tree_snapshot, end_blocks_tree_snapshot); + ASSERT_EQ(outputs.start_archive_snapshot, start_archive_snapshot); + ASSERT_EQ(outputs.end_archive_snapshot, end_archive_snapshot); // Compute the expected calldata hash for the root rollup (including the l2 -> l1 messages) auto left = components::compute_kernels_calldata_hash({ kernels[0], kernels[1] }); diff --git a/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp b/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp index 3c801da601f..b4f876263d9 100644 --- a/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/root/native_root_rollup_circuit.cpp @@ -116,10 +116,10 @@ RootRollupPublicInputs root_rollup_circuit(DummyBuilder& builder, RootRollupInpu right.end_public_data_tree_root); // Update the blocks tree - auto end_blocks_tree_snapshot = components::insert_subtree_to_snapshot_tree( + auto end_archive_snapshot = components::insert_subtree_to_snapshot_tree( builder, - rootRollupInputs.start_blocks_tree_snapshot, - rootRollupInputs.new_blocks_tree_sibling_path, + rootRollupInputs.start_archive_snapshot, + rootRollupInputs.new_archive_sibling_path, fr::zero(), block_hash, 0, @@ -140,8 +140,8 @@ RootRollupPublicInputs root_rollup_circuit(DummyBuilder& builder, RootRollupInpu .end_public_data_tree_root = right.end_public_data_tree_root, .start_l1_to_l2_messages_tree_snapshot = rootRollupInputs.start_l1_to_l2_messages_tree_snapshot, .end_l1_to_l2_messages_tree_snapshot = new_l1_to_l2_messages_tree_snapshot, - .start_blocks_tree_snapshot = rootRollupInputs.start_blocks_tree_snapshot, - .end_blocks_tree_snapshot = end_blocks_tree_snapshot, + .start_archive_snapshot = rootRollupInputs.start_archive_snapshot, + .end_archive_snapshot = end_archive_snapshot, .calldata_hash = components::compute_calldata_hash(rootRollupInputs.previous_rollup_data), .l1_to_l2_messages_hash = compute_messages_hash(rootRollupInputs.new_l1_to_l2_messages) }; diff --git a/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp b/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp index 72fee73a733..8526c02dd68 100644 --- 
a/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp +++ b/circuits/cpp/src/aztec3/circuits/rollup/test_utils/utils.cpp @@ -86,8 +86,8 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne { // @todo Look at the starting points for all of these. // By supporting as inputs we can make very generic tests, where it is trivial to try new setups. - MemoryStore blocks_tree_store; - MerkleTree blocks_tree = MerkleTree(blocks_tree_store, BLOCKS_TREE_HEIGHT); + MemoryStore archive_store; + MerkleTree archive = MerkleTree(archive_store, ARCHIVE_HEIGHT); BaseRollupInputs baseRollupInputs = { .kernel_data = kernel_data, @@ -161,10 +161,10 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne contract_tree.root(), l1_to_l2_msg_tree.root(), public_data_tree.root()); - blocks_tree.update_element(0, block_hash); + archive.update_element(0, block_hash); - ConstantRollupData const constantRollupData = { .start_blocks_tree_snapshot = { - .root = blocks_tree.root(), + ConstantRollupData const constantRollupData = { .start_archive_snapshot = { + .root = archive.root(), .next_available_leaf_index = 1, } }; baseRollupInputs.constants = constantRollupData; @@ -176,7 +176,7 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne kernel_data[i].public_inputs.constants.block_header.nullifier_tree_root = nullifier_tree.root(); kernel_data[i].public_inputs.constants.block_header.contract_tree_root = contract_tree.root(); kernel_data[i].public_inputs.constants.block_header.l1_to_l2_messages_tree_root = l1_to_l2_msg_tree.root(); - kernel_data[i].public_inputs.constants.block_header.blocks_tree_root = blocks_tree.root(); + kernel_data[i].public_inputs.constants.block_header.archive_root = archive.root(); kernel_data[i].public_inputs.constants.block_header.public_data_tree_root = public_data_tree.root(); kernel_data[i].public_inputs.constants.block_header.global_variables_hash = prev_global_variables_hash; } @@ -208,12 +208,11 @@ BaseRollupInputs base_rollup_inputs_from_kernels(std::array kerne } // Get historical_root sibling paths - baseRollupInputs.blocks_tree_root_membership_witnesses[0] = { + baseRollupInputs.archive_root_membership_witnesses[0] = { .leaf_index = 0, - .sibling_path = get_sibling_path(blocks_tree, 0, 0), + .sibling_path = get_sibling_path(archive, 0, 0), }; - baseRollupInputs.blocks_tree_root_membership_witnesses[1] = - baseRollupInputs.blocks_tree_root_membership_witnesses[0]; + baseRollupInputs.archive_root_membership_witnesses[1] = baseRollupInputs.archive_root_membership_witnesses[0]; baseRollupInputs.kernel_data = kernel_data; @@ -378,8 +377,8 @@ RootRollupInputs get_root_rollup_inputs(utils::DummyBuilder& builder, MemoryStore public_data_tree_store; MerkleTree public_data_tree(public_data_tree_store, PUBLIC_DATA_TREE_HEIGHT); - MemoryStore blocks_tree_store; - MerkleTree blocks_tree(blocks_tree_store, BLOCKS_TREE_HEIGHT); + MemoryStore archive_store; + MerkleTree archive(archive_store, ARCHIVE_HEIGHT); // Start blocks tree auto block_hash = compute_block_hash_with_globals(globals, @@ -388,16 +387,16 @@ RootRollupInputs get_root_rollup_inputs(utils::DummyBuilder& builder, contract_tree.root(), l1_to_l2_msg_tree.root(), public_data_tree.root()); - blocks_tree.update_element(0, block_hash); + archive.update_element(0, block_hash); // Blocks tree snapshots - AppendOnlyTreeSnapshot const start_blocks_tree_snapshot = { - .root = blocks_tree.root(), + AppendOnlyTreeSnapshot const start_archive_snapshot = { + .root = archive.root(), 
.next_available_leaf_index = 1, }; // Blocks tree - auto blocks_tree_sibling_path = get_sibling_path(blocks_tree, 1, 0); + auto archive_sibling_path = get_sibling_path(archive, 1, 0); // l1 to l2 tree auto l1_to_l2_tree_sibling_path = @@ -414,8 +413,8 @@ RootRollupInputs get_root_rollup_inputs(utils::DummyBuilder& builder, .new_l1_to_l2_messages = l1_to_l2_messages, .new_l1_to_l2_messages_tree_root_sibling_path = l1_to_l2_tree_sibling_path, .start_l1_to_l2_messages_tree_snapshot = start_l1_to_l2_msg_tree_snapshot, - .start_blocks_tree_snapshot = start_blocks_tree_snapshot, - .new_blocks_tree_sibling_path = blocks_tree_sibling_path, + .start_archive_snapshot = start_archive_snapshot, + .new_archive_sibling_path = archive_sibling_path, }; return rootRollupInputs; } diff --git a/circuits/cpp/src/aztec3/constants.hpp b/circuits/cpp/src/aztec3/constants.hpp index 9ecb0b384d8..358164687e5 100644 --- a/circuits/cpp/src/aztec3/constants.hpp +++ b/circuits/cpp/src/aztec3/constants.hpp @@ -104,7 +104,7 @@ constexpr size_t NOTE_HASH_TREE_HEIGHT = 32; constexpr size_t PUBLIC_DATA_TREE_HEIGHT = 254; constexpr size_t NULLIFIER_TREE_HEIGHT = 20; constexpr size_t L1_TO_L2_MSG_TREE_HEIGHT = 16; -constexpr size_t BLOCKS_TREE_HEIGHT = 16; +constexpr size_t ARCHIVE_HEIGHT = 16; constexpr size_t ROLLUP_VK_TREE_HEIGHT = 8; // TODO: update diff --git a/cspell.json b/cspell.json index 35d7779e39d..22bb21ec9f1 100644 --- a/cspell.json +++ b/cspell.json @@ -5,9 +5,12 @@ "acir", "acvm", "archiver", + "assignement", "asyncify", + "auditability", "authwit", "autonat", + "autorun", "awslogs", "awsvpc", "aztecprotocol", @@ -15,17 +18,21 @@ "bbfree", "bbmalloc", "benesjan", + "bleurgh", "bodyparser", "bootnode", + "bootnodes", "Brillig", "Bufferable", "bufs", "buildkit", + "buildx", "bytecodes", "calldatacopy", "callstack", "callstacks", "camelcase", + "casemap", "cbind", "cbinds", "chainsafe", @@ -35,23 +42,37 @@ "cimg", "clonedeep", "clonedeepwith", + "cmd", + "cmds", "codegen", "comlink", "composability", + "composablity", "concat", "cond", + "counterparty", "cpus", "customizability", "danlee", "Daos", + "Datas", "dbanks", "decrementation", + "defi", "delegatecall", + "delegatecalls", + "deregistration", + "devex", + "devnet", "devs", "diffie", "direnv", "dockerfiles", "dockerhub", + "dockerized", + "doesnt", + "dont", + "elif", "entrypoints", "erc", "falsey", @@ -59,6 +80,8 @@ "filestat", "flatmap", "foundryup", + "frontend", + "frontends", "fullpath", "fuzzer", "fuzzers", @@ -66,8 +89,11 @@ "gitrepo", "grumpkin", "gtest", + "hardfork", "hardlinks", + "hashable", "hasher", + "headstart", "herskind", "ierc", "indexeddb", @@ -78,31 +104,45 @@ "keccak", "keypairs", "keyscan", + "kothari", + "Lasse", "leveldb", "leveldown", "leveljs", "libp", "linkability", + "lmdb", + "maddiaa", "memdown", + "memfs", "Merkle", "messagebox", "mimc", "mktemp", "mload", "mockify", + "monomorphization", + "monomorphize", "mplex", "msgpack", "muldiv", + "multiarch", "multivalue", "muxers", + "nada", + "namespacing", "Nargo", "nixpkgs", + "nodebuffer", + "noirc", "noirup", "nullifer", + "offchain", "otterscan", "outdir", "overlayfs", "pako", + "Palla", "parallelizable", "Pedersen", "permissionless", @@ -116,16 +156,24 @@ "preimage", "preimages", "prestat", + "println", "productionify", "protobuf", "proxied", "proxified", "proxify", + "pseudocode", "pubkey", "pxes", "quickstart", + "Quickstarts", "rahul", + "REALDIR", + "realpath", + "rebundle", "repr", + "Reserialize", + "retag", "rethrown", "rollup", "rollups", @@ -138,25 +186,43 @@ "sload", 
"snakecase", "solhint", + "SSTORE", + "staticcall", "stdlib", "struct", "structs", + "subarrays", + "subdir", + "sublabel", + "sublabels", + "subpackage", + "subpackages", "subrepo", + "subroot", "suyash", + "templating", + "tldr", "toplevel", "tparam", "transferables", "trivago", "tsbuildinfo", "tsdoc", + "typechain", "typecheck", + "typegen", "typeparam", "unexclude", "unexcluded", + "unprefixed", "unshield", "unshielding", + "unzipit", "upperfirst", + "usecase", + "usecases", "utxo", + "UTXOS", "vals", "viem", "wasms", @@ -165,69 +231,7 @@ "yamux", "yarnrc", "zerocash", - "zexe", - "Lasse", - "Palla", - "pseudocode", - "staticcall", - "subdir", - "noirc", - "devex", - "monomorphize", - "hashable", - "typechain", - "subroot", - "dont", - "monomorphization", - "println", - "usecase", - "doesnt", - "Quickstarts", - "headstart", - "kothari", - "autorun", - "realpath", - "namespacing", - "memfs", - "unzipit", - "Datas", - "nada", - "maddiaa", - "subpackage", - "subpackages", - "nodebuffer", - "devnet", - "Reserialize", - "tldr", - "subarrays", - "dockerized", - "elif", - "assignement", - "REALDIR", - "unprefixed", - "casemap", - "rebundle", - "sublabel", - "sublabels", - "retag", - "frontend", - "frontends", - "typegen", - "deregistration", - "SSTORE", - "UTXOS", - "defi", - "usecases", - "usecase", - "templating", - "bleurgh", - "offchain", - "delegatecalls", - "auditability", - "hardfork", - "composablity", - "counterparty", - "lmdb" + "zexe" ], "ignorePaths": [ "node_modules/", @@ -254,5 +258,7 @@ "lib", "*.cmake" ], - "flagWords": ["anonymous"] + "flagWords": [ + "anonymous" + ] } diff --git a/docs/docs/about_aztec/overview.mdx b/docs/docs/about_aztec/overview.mdx index 3fceee95c47..9bd2afacf19 100644 --- a/docs/docs/about_aztec/overview.mdx +++ b/docs/docs/about_aztec/overview.mdx @@ -33,7 +33,7 @@ Watch Zac, CEO of Aztec, describe our approach to building a privacy preserving ### Private-public Composability -You can watch Mike, Aztec PM, talk about public-private composablity in Aztec at Devcon here. +You can watch Mike, Aztec PM, talk about public-private composability in Aztec at Devcon here. vk relationships. diff --git a/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md b/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md index 751b209dc9b..50ea9ba1146 100644 --- a/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md +++ b/docs/docs/concepts/advanced/data_structures/indexed_merkle_tree.md @@ -14,7 +14,7 @@ This page will answer: - How indexed merkle trees work - How they can be used for membership exclusion proofs - How they can leverage batch insertions -- Tradoffs of using indexed merkle trees +- Tradeoffs of using indexed merkle trees The content was also covered in a presentation for the [Privacy + Scaling Explorations team at the Ethereum Foundation](https://pse.dev/). @@ -34,7 +34,7 @@ A sparse merkle tree (not every leaf stores a value): -In order to spend / modify a note in the private state tree, one must create a nullifier for it, and prove that the nullifier does not already exist in the nullifier tree. As nullifier trees are modelled as sparse merkle trees, non membership checks are (conceptually) trivial. +In order to spend / modify a note in the private state tree, one must create a nullifier for it, and prove that the nullifier does not already exist in the nullifier tree. As nullifier trees are modeled as sparse merkle trees, non membership checks are (conceptually) trivial. 
Data is stored at the leaf index corresponding to its value. E.g. if I have a sparse tree that can contain $2^{256}$ values and want to prove non membership of the value $2^{128}$, I can prove via a merkle membership proof that $tree\_values[2^{128}] = 0$, conversely if I can prove that $tree\_values[2^{128}] == 1$ I can prove that the item exists. @@ -155,8 +155,8 @@ Suppose we want to show that the value `20` doesn't exist in the tree. We just r - Special case, the low leaf is at the very end, so the new_value must be higher than all values in the tree: - $assert(low\_nullifier_{\textsf{value}} < new\_value_{\textsf{value}})$ - Else: - - $assert(low\_nullifier_{\textsf{value}} < low\_nullifier_{\textsf{value}})$ - - $assert(low\_nullifier_{\textsf{next\_value}} > low\_nullifier_{\textsf{value}})$ + - $assert(low\_nullifier_{\textsf{value}} < new\_value_{\textsf{value}})$ + - $assert(low\_nullifier_{\textsf{next\_value}} > new\_value_{\textsf{value}})$ This is already a massive performance improvement; however, we can go further, as this tree is not sparse. We can perform batch insertions. @@ -282,7 +282,7 @@ From looking at the code above we can probably deduce why we need pending insert To perform batched insertions, our circuit must keep track of all values that are pending insertion. -- If the `low_nullifier_membership_witness` is identified to be nonsense ( all zeros, or has a leaf index of -1 ) we will know that this is an pending low nullifier read request and we will have to look within our pending subtree for the nearest low nullifier. +- If the `low_nullifier_membership_witness` is identified to be nonsense (all zeros, or has a leaf index of -1) we will know that this is a pending low nullifier read request and we will have to look within our pending subtree for the nearest low nullifier. - Loop back through all "pending_insertions" - If the pending insertion value is lower than the nullifier we are trying to insert - If the pending insertion value is NOT found, then our circuit is invalid and should self abort. diff --git a/docs/docs/concepts/advanced/public_vm.md b/docs/docs/concepts/advanced/public_vm.md index a19a5625dfb..df93a070c6e 100644 --- a/docs/docs/concepts/advanced/public_vm.md +++ b/docs/docs/concepts/advanced/public_vm.md @@ -69,7 +69,7 @@ It verifies a _verifier circuit_ that verifies a public function proof! Why? Modularity, ease of development, backwards compatibility support. -Proceed with following development phases: +Proceed with the following development phases: #### Phase 0: Full Proverless diff --git a/docs/docs/concepts/advanced/sequencer_selection.md b/docs/docs/concepts/advanced/sequencer_selection.md index 4bf211eb4c1..72937391468 100644 --- a/docs/docs/concepts/advanced/sequencer_selection.md +++ b/docs/docs/concepts/advanced/sequencer_selection.md @@ -40,7 +40,7 @@ During the initial proposal phase, proposers submit to L1 a **block commitment** - Identifier of the previous block in the chain. - The output of the VRF for this sequencer. -At the end of the proposal phase, the sequencer with the highest score submitted becomes the leader for this cycle, and has exclusive rights to deciding the contents of the block. Note that this plays nicely with private mempools, since having exclusive rights allows the leader to disclose private transaction data in the reveal phase. +At the end of the proposal phase, the sequencer with the highest score submitted becomes the leader for this cycle, and has exclusive rights to decide the contents of the block.
Note that this plays nicely with private mempools, since having exclusive rights allows the leader to disclose private transaction data in the reveal phase. > _In the original version of Fernet, multiple competing proposals could enter the proving phase. Read more about the rationale for this change [here](https://hackmd.io/0cI_xVsaSVi7PToCJ9A2Ew?both#Mitigation-Elect-single-leader-after-proposal-phase)._ @@ -79,15 +79,15 @@ The only way to trigger an L2 reorg (without an L1 one) is if block N is reveale ![](https://hackmd.io/_uploads/HkMDHxxC2.png) -To mitigate the effect of wasted effort by all sequencers from block N+1 until the reorg, we could implement uncle rewards for these sequencers. And if we are comfortable with slashing, take those rewards out of the pocket of the sequencer that failed to finalise their block. +To mitigate the effect of wasted effort by all sequencers from block N+1 until the reorg, we could implement uncle rewards for these sequencers. And if we are comfortable with slashing, take those rewards out of the pocket of the sequencer that failed to finalize their block. ## Batching > _Read more approaches to batching [here](https://hackmd.io/0cI_xVsaSVi7PToCJ9A2Ew?both#Batching)._ -As an extension to the protocol, we can bake in batching of multiple blocks. Rather than creating one proof per block, we can aggregate multiple blocks into a single proof, in order to amortise the cost of verifying the root rollup ZKP on L1, thus reducing fees. +As an extension to the protocol, we can bake in batching of multiple blocks. Rather than creating one proof per block, we can aggregate multiple blocks into a single proof, in order to amortize the cost of verifying the root rollup ZKP on L1, thus reducing fees. -The tradeoff in batching is delayed finalisation: if we are not posting proofs to L1 for every block, then the network needs to wait until the batch proof is submitted for finalisation. This can also lead to deeper L2 reorgs. +The tradeoff in batching is delayed finalization: if we are not posting proofs to L1 for every block, then the network needs to wait until the batch proof is submitted for finalization. This can also lead to deeper L2 reorgs. In a batching model, proving for each block happens immediately as the block is revealed, same as usual. But the resulting proof is not submitted to L1: instead, it is aggregated into the proof of the next block. diff --git a/docs/docs/concepts/foundation/accounts/keys.md b/docs/docs/concepts/foundation/accounts/keys.md index 589667cbab3..cd8744d0e62 100644 --- a/docs/docs/concepts/foundation/accounts/keys.md +++ b/docs/docs/concepts/foundation/accounts/keys.md @@ -65,7 +65,7 @@ A side effect of enshrining and encoding privacy keys into the account address i ### Encryption keys -The privacy master key is used to derive encryption keys. Encryption keys, as their name imply, are used for encrypting private notes for a recipient, where the public key is used for encryption and the corresponding private key used for decryption. +The privacy master key is used to derive encryption keys. Encryption keys, as their name implies, are used for encrypting private notes for a recipient, where the public key is used for encryption and the corresponding private key used for decryption. In a future version, encryption keys will be differentiated between incoming and outgoing. 
When sending a note to another user, the sender will use the recipient's incoming encryption key for encrypting the data for them, and will optionally use their own outgoing encryption key for encrypting any data about the destination of that note. This is useful for reconstructing transaction history from on-chain data. For example, during a token transfer, the token contract may dictate that the sender encrypts the note value with the recipient's incoming key, but also records the transfer with its own outgoing key for bookkeeping purposes. @@ -106,4 +106,4 @@ Nevertheless, the attacker cannot steal the affected user's funds, since authent :::info Note that, in the current architecture, the user's wallet needs direct access to the privacy private key, since the wallet needs to use this key for attempting decryption of all notes potentially sent to the user. This means that the privacy private key cannot be stored in a hardware wallet or hardware security module, since the wallet software uses the private key material directly. This may change in future versions in order to enhance security. -::: \ No newline at end of file +::: diff --git a/docs/docs/concepts/foundation/communication/cross_chain_calls.md b/docs/docs/concepts/foundation/communication/cross_chain_calls.md index 44e952a68c1..bff7ed5b936 100644 --- a/docs/docs/concepts/foundation/communication/cross_chain_calls.md +++ b/docs/docs/concepts/foundation/communication/cross_chain_calls.md @@ -73,7 +73,7 @@ In a logical sense, a Message Box functions as a one-way message passing mechani - At some point, a rollup will be executed; in this step messages are "moved" from pending on Domain A, to ready on Domain B. Note that consuming the message is "pulling & deleting" (or nullifying). The action is atomic, so a message that is consumed from the pending set MUST be added to the ready set, or the state transition should fail. A further constraint on moving messages along the way is that only messages where the `sender` and `recipient` pair exists in a leaf in the contracts tree are allowed! - When the message has been added to the ready set, the `recipient` can consume the message as part of a function call. -Something that might seem weird when comparing to other cross-chain setups, is that we are "pulling" messages, and that the message don't need to be calldata for a function call. For _Arbitrum_ and the like, execution is happening FROM the "message bridge", which then calls the L1 contract. For us, you call the L1 contract, and it should then consume messages from the message box. +Something that might seem weird when compared to other cross-chain setups is that we are "pulling" messages, and that the messages don't need to be calldata for a function call. For _Arbitrum_ and the like, execution is happening FROM the "message bridge", which then calls the L1 contract. For us, you call the L1 contract, and it should then consume messages from the message box. Why? _Privacy_! When pushing, we would be needing full `calldata`, which for functions with private inputs is not really something we want, as that calldata for L1 -> L2 transactions is committed to on L1, e.g., publicly sharing the inputs to a private function. By instead pulling, we can have the "message" be something that is derived from the arguments instead. This way, a private function to perform the second half of a deposit could leak the "value" deposited and "who" made the deposit (as this is done on L1), but the new owner can be hidden on L2.
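As a rough illustration of the pull-based flow just described, here is a toy TypeScript model of the message box semantics (the class and method names are made up for this sketch and are not the real contracts):

```typescript
// Toy model of a one-way message box. The rollup moves a message hash from
// "pending" on the source domain to "ready" on the destination domain; the
// recipient then pulls (consumes) it as part of its own call.
class MessageBox {
  private pending = new Set<string>();
  private ready = new Set<string>();

  send(messageHash: string) {
    this.pending.add(messageHash);
  }

  // Performed by the rollup: atomically move pending -> ready.
  process(messageHash: string) {
    if (!this.pending.delete(messageHash)) throw new Error('not pending');
    this.ready.add(messageHash);
  }

  // "Pulling & deleting": consuming nullifies the message, so it is one-shot.
  consume(messageHash: string) {
    if (!this.ready.delete(messageHash)) throw new Error('not ready');
  }
}

const box = new MessageBox();
box.send('0xabc');
box.process('0xabc');
box.consume('0xabc'); // ok exactly once
// box.consume('0xabc'); // would throw: already nullified
```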
@@ -107,7 +107,7 @@ For the sake of cross-chain messages, this means inserting and nullifying L1 $\r ### Messages -While a message could theoretically be arbitrary long, we want to limit the cost of the insertion on L1 as much as possible. Therefore, we allow the users to send 32 bytes of "content" between L1 and L2. If 32 suffices, no packing required. If the 32 is too "small" for the message directly, the sender should simply pass along a `sha256(content)` instead of the content directly (note that this hash should fit in a field element which is ~254 bits. More info on this below). The content can then either be emitted as an event on L2 or kept by the sender, who should then be the only entity that can "unpack" the message. +While a message could theoretically be arbitrarily long, we want to limit the cost of the insertion on L1 as much as possible. Therefore, we allow the users to send 32 bytes of "content" between L1 and L2. If 32 suffices, no packing required. If the 32 is too "small" for the message directly, the sender should simply pass along a `sha256(content)` instead of the content directly (note that this hash should fit in a field element which is ~254 bits. More info on this below). The content can then either be emitted as an event on L2 or kept by the sender, who should then be the only entity that can "unpack" the message. In this manner, there is some way to "unpack" the content on the receiving domain. The message that is passed along requires the `sender/recipient` pair to be communicated as well (we need to know who should receive the message and be able to check). By having the pending messages be a contract on L1, we can ensure that the `sender = msg.sender` and let only `content` and `recipient` be provided by the caller. Summing up, we can use the structs seen below, and only store the commitment (`sha256(LxToLyMsg)`) on chain or in the trees; this way, we need only update a single storage slot per message. @@ -159,7 +159,7 @@ The following diagram shows the overall architecture, combining the earlier sect As mentioned earlier, there will be a link between L1 and L2 contracts (with the L1 part of the link being the portal contract), this link is created at "birth" when the contract leaf is inserted. However, the specific requirements of the link are not yet fully decided, and we will outline a few options below. -The reasoning behind having a link, comes from the difficulty of L2 access control (see "A note on L2 access control"). By having a link that only allows 1 contract (specified at deployment) to send messages to the L2 contract makes this issue "go away" from the application developers point of view as the message could only come from the specified contract. The complexity is moved to the protocol layer, that must now ensure that messages to the L2 contract are only sent from the specified L1 contract. +The reasoning behind having a link comes from the difficulty of L2 access control (see "A note on L2 access control"). Having a link that only allows 1 contract (specified at deployment) to send messages to the L2 contract makes this issue "go away" from the application developer's point of view, as the message could only come from the specified contract. The complexity is moved to the protocol layer, which must now ensure that messages to the L2 contract are only sent from the specified L1 contract. :::info The design space for linking L1 and L2 contracts is still open, and we are looking into making access control more efficient to use in the models.
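Returning to the `sha256(content)` trick from the Messages section above, here is a minimal TypeScript sketch of hashing oversized content down to a field element (the BN254 scalar field modulus is assumed here, and reducing modulo the field is just one simple way to make the digest fit; the real `sha256ToField` utility in `Hash.sol` may differ in detail):

```typescript
import { createHash } from 'crypto';

// BN254 scalar field modulus (~254 bits), assumed for illustration.
const FIELD_MODULUS =
  21888242871839275222246405745257275088548364400416034343698204186575808495617n;

// Content larger than one field element is passed along as a hash instead.
function sha256ToField(content: Buffer): bigint {
  const digest = createHash('sha256').update(content).digest('hex');
  return BigInt('0x' + digest) % FIELD_MODULUS; // reduce so it fits in a field
}

const payload = Buffer.from('args of the L2 function, too big for 32 bytes');
console.log(sha256ToField(payload).toString(16));
```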
@@ -179,7 +179,7 @@ From the L2 contract receiving messages, this model is very similar to the 1:1, When the L1 contract can itself handle where messages are coming from (it could before as well but useless as only 1 address could send), we don't need to worry about it being in only a single pair. The circuits can therefore simply insert the contract leafs without requiring it to ensure that neither have been used before. -With many L2's reading from the same L1, we can also more easily setup generic bridges (with many assets) living in a single L1 contract but minting multiple L2 assets, as the L1 contract can handle the access control and the L2's simply point to it as the portal. This reduces complexity of the L2 contracts as all access control is handled by the L1 contract. +With many L2's reading from the same L1, we can also more easily set up generic bridges (with many assets) living in a single L1 contract but minting multiple L2 assets, as the L1 contract can handle the access control and the L2's simply point to it as the portal. This reduces the complexity of the L2 contracts as all access control is handled by the L1 contract. ## Open Questions diff --git a/docs/docs/concepts/foundation/communication/main.md b/docs/docs/concepts/foundation/communication/main.md index 1f85585dacc..99588c9bc7c 100644 --- a/docs/docs/concepts/foundation/communication/main.md +++ b/docs/docs/concepts/foundation/communication/main.md @@ -2,8 +2,8 @@ title: Contract Communication --- -This section will walk over communication types that behaves differently than normal function calls from. +This section will walk over communication types that behave differently than normal function calls. Namely, if functions are in different domains, private vs. public, their execution behaves a little differently to what you might expect! See [Private <--> Public execution](./public_private_calls/main.md). -Likewise, executing a function on a different domain than its origin needs a bit extra thought. See [L1 <--> L2 communication](./cross_chain_calls.md). \ No newline at end of file +Likewise, executing a function on a different domain than its origin needs a bit of extra thought. See [L1 <--> L2 communication](./cross_chain_calls.md). diff --git a/docs/docs/concepts/foundation/transactions.md b/docs/docs/concepts/foundation/transactions.md index 7e44497ae07..9575ab4d6db 100644 --- a/docs/docs/concepts/foundation/transactions.md +++ b/docs/docs/concepts/foundation/transactions.md @@ -15,13 +15,13 @@ On this page you'll learn: See [this diagram](https://raw.githubusercontent.com/AztecProtocol/aztec-packages/2fa143e4d88b3089ebbe2a9e53645edf66157dc8/docs/static/img/sandbox_sending_a_tx.svg) for an in-depth overview of the transaction execution process. It highlights 3 different types of transaction execution: contract deployments, private transactions and public transactions. -See the page on [contract communication](./communication/main.md) for more context on transactions execution. +See the page on [contract communication](./communication/main.md) for more context on transaction execution. ## Enabling Transaction Semantics: The Aztec Kernel There are two kernel circuits in Aztec, the private kernel and the public kernel. Each circuit validates the correct execution of a particular function call. -A transaction is built up by generating proofs for multiple recursive iterations of kernel circuits.
Each call in the call stack is modelled as new iteration of the kernel circuit and are managed by a [FIFO]() queue containing pending function calls. There are two call stacks, one for private calls and one for public calls. +A transaction is built up by generating proofs for multiple recursive iterations of kernel circuits. Each call in the call stack is modeled as a new iteration of the kernel circuit and is managed by a [FIFO]() queue containing pending function calls. There are two call stacks, one for private calls and one for public calls. One iteration of a kernel circuit will pop a call off of the stack and execute the call. If the call triggers subsequent contract calls, these are pushed onto the stack. diff --git a/docs/docs/dev_docs/cli/cli-commands.md b/docs/docs/dev_docs/cli/cli-commands.md index 51b6f4799ea..cbf8afff54b 100644 --- a/docs/docs/dev_docs/cli/cli-commands.md +++ b/docs/docs/dev_docs/cli/cli-commands.md @@ -76,7 +76,7 @@ Let's double check that the accounts have been registered with the sandbox using #include_code get-accounts yarn-project/end-to-end/src/cli_docs_sandbox.test.ts bash -You will see a that a number of accounts exist that we did not create. The Sandbox initializes itself with 3 default accounts. Save one of the printed accounts (not the one that you generated above) in an environment variable. We will use it later. +You will see that a number of accounts exist that we did not create. The Sandbox initializes itself with 3 default accounts. Save one of the printed accounts (not the one that you generated above) in an environment variable. We will use it later. ```bash export ADDRESS2= diff --git a/docs/docs/dev_docs/contracts/portals/inbox.md b/docs/docs/dev_docs/contracts/portals/inbox.md index dad45a81583..ea8a3609830 100644 --- a/docs/docs/dev_docs/contracts/portals/inbox.md +++ b/docs/docs/dev_docs/contracts/portals/inbox.md @@ -16,7 +16,7 @@ Sends a message from L1 to L2. | Name | Type | Description | | -------------- | ------- | ----------- | | Recipient | `L2Actor` | The recipient of the message. This **MUST** match the rollup version and an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | -| Deadline | `uint256` | The message consumption deadline. If the message have not been removed from the `Inbox` and included in a rollup block by this point, it can be *cancelled* by the portal (the portal must implement logic to cancel). | +| Deadline | `uint256` | The message consumption deadline. If the message has not been removed from the `Inbox` and included in a rollup block by this point, it can be *canceled* by the portal (the portal must implement logic to cancel). | | Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to be a single field for rollup purposes. If the content is small enough it can just be passed along, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/master/l1-contracts/src/core/libraries/Hash.sol) utilities with `sha256ToField` functions) | | Secret Hash | `field` (~254 bits) | A hash of a secret that is used when consuming the message on L2. Keep this preimage a secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed) - so make sure your app keeps track of the pre-images!
Use the [`computeMessageSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec.js/src/utils/secrets.ts) to compute it from a secret. | | Fee (msg.value) | `uint256` | The fee to the sequencer for including the message. This is the amount of ETH that the sequencer will receive for including the message. Note that only values that can fit in `uint64` will be accepted | @@ -110,4 +110,4 @@ Computes the hash of a message. | Name | Type | Description | | -------------- | ------- | ----------- | | `_message` | `L1ToL2Msg` | The message to compute hash for | -| ReturnValue | `bytes32` | The hash of the message | \ No newline at end of file +| ReturnValue | `bytes32` | The hash of the message | diff --git a/docs/docs/dev_docs/contracts/portals/main.md b/docs/docs/dev_docs/contracts/portals/main.md index e636d99bd63..63273257843 100644 --- a/docs/docs/dev_docs/contracts/portals/main.md +++ b/docs/docs/dev_docs/contracts/portals/main.md @@ -5,7 +5,7 @@ description: Documentation of Aztec's Portals and Cross-chain communication. ## What is a portal -A portal is the point of contact between L1 and a specific contract on Aztec. For applications such as token bridges, this is the point where the tokens are are held on L1 while used in L2. +A portal is the point of contact between L1 and a specific contract on Aztec. For applications such as token bridges, this is the point where the tokens are held on L1 while used in L2. As outlined in the [foundational concepts](../../../concepts/foundation/communication/cross_chain_calls.md), an Aztec L2 contract is linked to _ONE_ L1 address at the time of deployment (specified by the developer). This L1 address is the only address that can send messages to that specific L2 contract, and the only address that can receive messages sent from the L2 contract to L1. Note that a portal doesn't actually need to be a contract; it could be any address on L1. We say that an Aztec contract is attached to a portal. @@ -20,7 +20,7 @@ When sending messages, we need to specify quite a bit of information beyond just | Name | Type | Description | | ----------- | ------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | | Recipient | `L2Actor` | The message recipient. This **MUST** match the rollup version and an Aztec contract that is **attached** to the contract making this call. If the recipient is not attached to the caller, the message cannot be consumed by it. | -| Deadline | `uint256` | The deadline for the message to be consumed. If the message has not been removed from the `Inbox` and included in a rollup block by this point, it can be _cancelled_ by the portal (the portal must implement logic to cancel). | +| Deadline | `uint256` | The deadline for the message to be consumed. If the message has not been removed from the `Inbox` and included in a rollup block by this point, it can be _canceled_ by the portal (the portal must implement logic to cancel). | | Content | `field` (~254 bits) | The content of the message. This is the data that will be passed to the recipient. The content is limited to be a single field.
If the content is small enough it can just be passed along, otherwise it should be hashed and the hash passed along (you can use our [`Hash`](https://github.com/AztecProtocol/aztec-packages/blob/master/l1-contracts/src/core/libraries/Hash.sol) utilities with `sha256ToField` functions) | | Secret Hash | `field` (~254 bits) | A hash of a secret that is used when consuming the message on L2. Keep this preimage a secret to make the consumption private. To consume the message the caller must know the pre-image (the value that was hashed) - so make sure your app keeps track of the pre-images! Use the [`computeMessageSecretHash`](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec.js/src/utils/secrets.ts) to compute it from a secret. | | Fee | `uint64` | The fee to the sequencer for including the message. This is the amount of ETH that the sequencer will receive for including the message. Note that it is not a full `uint256` but only `uint64` | @@ -154,7 +154,7 @@ Error handling for cross chain messages is handled by the application contract a A special type of error is an underpriced transaction - it means that a message is inserted on L1, but the attached fee is too low to be included in a rollup block. -For the case of token bridges, this could lead to funds being locked in the bridge forever, as funds are locked but the message never arrives on L2 to mint the tokens. To address this, the `Inbox` supports cancelling messages after a deadline. However, this must be called by the portal itself, as it will need to "undo" the state changes is made (for example by sending the tokens back to the user). +For the case of token bridges, this could lead to funds being locked in the bridge forever, as funds are locked but the message never arrives on L2 to mint the tokens. To address this, the `Inbox` supports canceling messages after a deadline. However, this must be called by the portal itself, as it will need to "undo" the state changes it made (for example by sending the tokens back to the user). As this requires logic on the portal itself, it is not something that the protocol can enforce. It must be supported by the application builder when building the portal. @@ -189,7 +189,7 @@ bytes memory message = abi.encodeWithSignature( This way, the message can be consumed by the portal contract, but only if the caller is the designated caller. By being a bit clever when specifying the designated caller, we can ensure that the calls are done in the correct order. For the Uniswap example, say that we have token portals implemented as we have done throughout this page, and a Uniswap portal implementing the designated caller. -We require that the Uniswap portal is the caller of the withdraw, and that the uniswap portal implementation is executing the withdraw before the swap. +We require that the Uniswap portal is the caller of the withdrawal, and that the Uniswap portal implementation is executing the withdrawal before the swap. The order of execution can be constrained in the contract. Since all of the messages are emitted to L1 in the same transaction, we can leverage transaction atomicity to ensure success or failure of all messages. Note that crossing the L1/L2 chasm is asynchronous, so there could be a situation where the user has burned their assets on L2 but the swap fails on L1! This could be due to major price movements or the like.
In such a case, the user could be stuck with funds on L1 that they cannot get back to L2 unless the portal contract implements a way to properly handle such errors. diff --git a/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md b/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md index 2f2c0d38074..4ec336a0282 100644 --- a/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md +++ b/docs/docs/dev_docs/contracts/resources/common_patterns/authwit.md @@ -114,7 +114,7 @@ This function computes the message hash, and then forwards the call to the more ### Utilities for public calls -Very similar to above, we have variations that work in the public domain. These functions are wrapped to give a similar flow for both cases, but behind the scenes the logic of the account contracts is slightly different since they cannot use the oracle as they are not in the private domain. +Very similar to the above, we have variations that work in the public domain. These functions are wrapped to give a similar flow for both cases, but behind the scenes the logic of the account contracts is slightly different since they cannot use the oracle as they are not in the private domain. #### Example diff --git a/docs/docs/dev_docs/contracts/resources/common_patterns/main.md b/docs/docs/dev_docs/contracts/resources/common_patterns/main.md index a75e128fa6e..9cbbf541f8d 100644 --- a/docs/docs/dev_docs/contracts/resources/common_patterns/main.md +++ b/docs/docs/dev_docs/contracts/resources/common_patterns/main.md @@ -79,7 +79,7 @@ Let's say you have some storage in public and want to move them into the private So you have to create a custom note in the public domain that is not encrypted by some owner - we call such notes a "TransparentNote" since it is created in public, anyone can see the amount and the note is not encrypted by some owner. -This pattern discussed in detail in [writing a token contract section in the shield() method](../../../tutorials/writing_token_contract.md#shield) and [redeem_shield() method](../../../tutorials/writing_token_contract.md#redeem_shield). +This pattern is discussed in detail in [writing a token contract section in the shield() method](../../../tutorials/writing_token_contract.md#shield) and [redeem_shield() method](../../../tutorials/writing_token_contract.md#redeem_shield). ### Discovering my notes When you send someone a note, the note hash gets added to the [note hash tree](../../../../concepts/advanced/data_structures/trees#note-hash-tree). To spend the note, the receiver needs to get the note itself (the note hash preimage). There are two ways you can get a hold of your notes: diff --git a/docs/docs/dev_docs/contracts/syntax/functions.md b/docs/docs/dev_docs/contracts/syntax/functions.md index 334aaa1401b..8548be6afb6 100644 --- a/docs/docs/dev_docs/contracts/syntax/functions.md +++ b/docs/docs/dev_docs/contracts/syntax/functions.md @@ -286,10 +286,10 @@ The kernel can then check that all of the values passed to each circuit in a fun **Returning the context to the kernel.** #include_code context-example-return /yarn-project/noir-contracts/src/contracts/docs_example_contract/src/main.nr rust -Just as the kernel passes information into the the app circuits, the application must return information about the executed app back to the kernel. This is done through a rigid structure we call the `PrivateCircuitPublicInputs`. 
+Just as the kernel passes information into the app circuits, the application must return information about the executed app back to the kernel. This is done through a rigid structure we call the `PrivateCircuitPublicInputs`. > _Why is it called the `PrivateCircuitPublicInputs`_ -> It is commonly asked why the return values of a function in a circuit are labelled as the `Public Inputs`. Common intuition from other programming paradigms suggests that the return values and public inputs should be distinct. +> It is commonly asked why the return values of a function in a circuit are labeled as the `Public Inputs`. Common intuition from other programming paradigms suggests that the return values and public inputs should be distinct. > However; In the eyes of the circuit, anything that is publicly viewable (or checkable) is a public input. Hence in this case, the return values are also public inputs. This structure contains a host of information about the executed program. It will contain any newly created nullifiers, any messages to be sent to l2 and most importantly it will contain the actual return values of the function! diff --git a/docs/docs/dev_docs/contracts/syntax/globals.md b/docs/docs/dev_docs/contracts/syntax/globals.md index 1a50ac54066..1778c75e56d 100644 --- a/docs/docs/dev_docs/contracts/syntax/globals.md +++ b/docs/docs/dev_docs/contracts/syntax/globals.md @@ -39,7 +39,7 @@ context.timestamp(); ``` ### Block Number -The block number is an sequential identifier that labels each individual block of the network. This value will be the block number of the block the accessing transaction is included in. +The block number is a sequential identifier that labels each individual block of the network. This value will be the block number of the block the accessing transaction is included in. The block number of the genesis block will be 1, with the number increasing by 1 for every block after. ```rust @@ -49,4 +49,4 @@ context.block_number(); :::info *Why do the available global variables differ per execution environment?* The global variables are constrained by the proving environment. In the case of public functions, they are executed on a sequencer that will know the timestamp and number of the next block ( as they are the block producer ). In the case of private functions, we cannot be sure which block our transaction will be included in, hence we can not guarantee values for the timestamp or block number. -::: \ No newline at end of file +::: diff --git a/docs/docs/dev_docs/contracts/syntax/storage/main.md b/docs/docs/dev_docs/contracts/syntax/storage/main.md index b647cc6ff50..da66458882f 100644 --- a/docs/docs/dev_docs/contracts/syntax/storage/main.md +++ b/docs/docs/dev_docs/contracts/syntax/storage/main.md @@ -286,7 +286,7 @@ As part of the initialization of the `Storage` struct, the `Singleton` is create ### `initialize` -As mention, the Singleton is initialized to create the first note and value. +As mentioned, the Singleton is initialized to create the first note and value. When this function is called, a nullifier of the storage slot is created, preventing this Singleton from being initialized again. If an `owner` is specified, the nullifier will be hashed with the owner's secret key. It's crucial to provide an owner if the Singleton is associated with an account. Initializing it without an owner may inadvertently reveal important information about the owner's intention. 
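A toy TypeScript sketch of the initialization guard just described (the hash choice and names are illustrative only; aztec.nr uses Pedersen-style hashes and its own nullifier derivation):

```typescript
import { createHash } from 'crypto';

// The network-wide nullifier set: a nullifier can only ever be emitted once.
const nullifiers = new Set<string>();

function initializeSingleton(storageSlot: bigint, ownerSecretKey?: bigint) {
  // The nullifier commits to the storage slot, and to the owner's secret if
  // one is given, so a second initialization of the same slot is rejected.
  const h = createHash('sha256').update(storageSlot.toString(16));
  if (ownerSecretKey !== undefined) h.update(ownerSecretKey.toString(16));
  const nullifier = h.digest('hex');

  if (nullifiers.has(nullifier)) throw new Error('Singleton already initialized');
  nullifiers.add(nullifier);
}

initializeSingleton(7n, 0x1234n); // ok
// initializeSingleton(7n, 0x1234n); // would throw: slot already initialized
```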
@@ -416,7 +416,7 @@ An example of how to use this operation is visible in the `easy_private_state`: This function returns the notes the account has access to. -The kernel circuits are constrained to a maximum number of notes this function can return at a time. Check [here](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec-nr/aztec/src/constants_gen.nr) and look for `MAX_READ_REQUESTS_PER_CALL` for the up-to-date number. +The kernel circuits are constrained to a maximum number of notes this function can return at a time. Check [here](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr) and look for `MAX_READ_REQUESTS_PER_CALL` for the up-to-date number. Because of this limit, we should always consider using the second argument `NoteGetterOptions` to limit the number of notes we need to read and constrain in our programs. This is quite important as every extra call increases the time used to prove the program and we don't want to spend more time than necessary. @@ -430,7 +430,7 @@ Functionally similar to [`get_notes`](#get_notes), but executed unconstrained an #include_code view_notes /yarn-project/aztec-nr/value-note/src/balance_utils.nr rust -There's also a limit on the maximum number of notes that can be returned in one go. To find the current limit, refer to [this file](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec-nr/aztec/src/constants_gen.nr) and look for `MAX_NOTES_PER_PAGE`. +There's also a limit on the maximum number of notes that can be returned in one go. To find the current limit, refer to [this file](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr) and look for `MAX_NOTES_PER_PAGE`. The key distinction is that this method is unconstrained. It does not perform a check to verify if the notes actually exist, which is something the [`get_notes`](#get_notes) method does under the hood. Therefore, it should only be used in an unconstrained contract function. diff --git a/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md b/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md index 17e49df55ca..351b17bd100 100644 --- a/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md +++ b/docs/docs/dev_docs/contracts/syntax/storage/storage_slots.md @@ -53,7 +53,7 @@ Where the `map_slot` is the slot specified in `Storage::init`, recall: #include_code storage_balances_init yarn-project/noir-contracts/src/contracts/token_contract/src/main.nr rust -And `to` is the actor who receives the note, `amount` of the note and `randomness` is the randomness used to make the note hiding. Without the `randomness` the note could could just as well be plaintext (computational cost of a preimage attack would be trivial in such a case). +And `to` is the actor who receives the note, `amount` is the amount of the note, and `randomness` is the randomness used to make the note hiding. Without the `randomness` the note could just as well be plaintext (computational cost of a preimage attack would be trivial in such a case). :::info Beware that this hash computation is what the aztec.nr library is doing, and not strictly required by the network (only the kernel computation is).
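To illustrate, a minimal TypeScript sketch of the slot siloing and hiding described in this section (sha256 stands in for the Pedersen hash aztec.nr actually uses, and all values are made up):

```typescript
import { createHash } from 'crypto';

// sha256 stands in for the Pedersen hash used by aztec.nr; illustration only.
const hashFields = (...xs: bigint[]) =>
  BigInt(
    '0x' + createHash('sha256').update(xs.map(x => x.toString(16)).join(':')).digest('hex'),
  );

const mapSlot = 1n; // slot of the balances map from Storage::init (made up)
const to = 0xdeadbeefn; // recipient address (made up)

// Notes under a map key live at the logical slot H(map_slot, to)...
const noteSlot = hashFields(mapSlot, to);

// ...and the note commitment includes fresh randomness, so the note is hiding:
// without `randomness`, a preimage search over plausible amounts would be easy.
const amount = 100n;
const randomness = 0x5eedn; // must be sampled uniformly at random in practice
const noteHash = hashFields(noteSlot, amount, randomness);
console.log(noteHash.toString(16));
```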
@@ -61,4 +61,4 @@ Beware that this hash computation is what the aztec.nr library is doing, and not With this note structure, the contract can require that only notes sitting at specific storage slots can be used by specific operations, e.g., if transferring funds from `from` to `to`, the notes to destroy should be linked to `H(map_slot, from)` and the new notes (except the change-note) should be linked to `H(map_slot, to)`. -That way, we can have logical storage slots, without them really existing. This means that knowing the storage slot for a note is not enough to actually figure out what is in there (whereas it would be for looking up public state). \ No newline at end of file +That way, we can have logical storage slots, without them really existing. This means that knowing the storage slot for a note is not enough to actually figure out what is in there (whereas it would be for looking up public state). diff --git a/docs/docs/dev_docs/debugging/sandbox-errors.md b/docs/docs/dev_docs/debugging/sandbox-errors.md index c7c2879eef3..f95925a4da2 100644 --- a/docs/docs/dev_docs/debugging/sandbox-errors.md +++ b/docs/docs/dev_docs/debugging/sandbox-errors.md @@ -160,7 +160,7 @@ Circuits work by having a fixed size array. As such, we have limits on how many - too many transient read requests in one tx - too many transient read request membership witnesses in one tx -You can have a look at our current constants/limitations in [constants.nr](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/aztec-nr/aztec/src/constants_gen.nr) +You can have a look at our current constants/limitations in [constants.nr](https://github.com/AztecProtocol/aztec-packages/blob/master/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr) #### 7008 - MEMBERSHIP_CHECK_FAILED diff --git a/docs/docs/dev_docs/limitations/main.md b/docs/docs/dev_docs/limitations/main.md index c697898a6f7..bc28d3fd480 100644 --- a/docs/docs/dev_docs/limitations/main.md +++ b/docs/docs/dev_docs/limitations/main.md @@ -189,7 +189,7 @@ Due to the rigidity of zk-SNARK circuits, there are upper bounds on the amount o Here are the current constants: -#include_code constants /yarn-project/aztec-nr/aztec/src/constants_gen.nr rust +#include_code constants /yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr rust #### What are the consequences? 
diff --git a/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md b/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md index a541e3abf5d..24ba1d18515 100644 --- a/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md +++ b/docs/docs/dev_docs/tutorials/writing_private_voting_contract.md @@ -75,7 +75,7 @@ We are using various utils within the Aztec library: * `state_vars::{ map::Map, public_state::PublicState, }` - we will use a Map to store the votes (key = voteId, value = number of votes), and PublicState to hold our public values that we mentioned earlier * `types::type_serialization::{..}` - various serialization methods for defining how to use these types * `types::address::{AztecAddress},` - our admin will be held as an address -* `constants_gen::EMPTY_NULLIFIED_COMMITMENT,` - this will come in useful when creating our nullifier +* `constants::EMPTY_NULLIFIED_COMMITMENT,` - this will come in useful when creating our nullifier ## Set up storage diff --git a/l1-contracts/Dockerfile b/l1-contracts/Dockerfile index 73eda3294b3..5544b0eaa7b 100644 --- a/l1-contracts/Dockerfile +++ b/l1-contracts/Dockerfile @@ -1,5 +1,5 @@ # Linting requires node. -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add --no-cache build-base git python3 curl bash jq WORKDIR /usr/src/l1-contracts COPY . . diff --git a/l1-contracts/scripts/ci_deploy_contracts.sh b/l1-contracts/scripts/ci_deploy_contracts.sh index 0589f43d666..128a1702f31 100755 --- a/l1-contracts/scripts/ci_deploy_contracts.sh +++ b/l1-contracts/scripts/ci_deploy_contracts.sh @@ -1,32 +1,38 @@ #!/bin/bash -FORCE_DEPLOY=${2:-"false"} - -export ETHEREUM_HOST=$DEPLOY_TAG-mainnet-fork.aztec.network:8545/$FORK_API_KEY +export ETHEREUM_HOST=https://$DEPLOY_TAG-mainnet-fork.aztec.network:8545/$FORK_API_KEY REPOSITORY="l1-contracts" CONTENT_HASH=$(calculate_content_hash $REPOSITORY) -# If we have previously successful commit, we can early out if nothing relevant has changed since. -if [[ $FORCE_DEPLOY == 'false' ]] && check_rebuild cache-"$CONTENT_HASH" $REPOSITORY; then - echo "No contract deploy necessary." +echo "Last successfully published commit: $CONTENT_HASH" + +# Check if image hash has already been deployed. +if check_rebuild "cache-$CONTENT_HASH-$DEPLOY_TAG-deployed" $REPOSITORY; then + echo "No changes detected, no contract deploy necessary." exit 0 fi +# Login to pull our ecr images with docker. +ecr_login + mkdir -p serve # Contract addresses will be mounted in the serve directory docker run \ - -v $(pwd)/serve:/usr/src/contracts/serve \ + -v $(pwd)/serve:/usr/src/l1-contracts/serve \ -e ETHEREUM_HOST=$ETHEREUM_HOST -e PRIVATE_KEY=$CONTRACT_PUBLISHER_PRIVATE_KEY \ - aztecprotocol/l1-contracts:$DEPLOY_TAG \ + "$ECR_URL/l1-contracts:cache-$CONTENT_HASH" \ ./scripts/deploy_contracts.sh # Write the contract addresses as terraform variables -for KEY in ROLLUP_CONTRACT_ADDRESS REGISTRY_CONTRACT_ADDRESS INBOX_CONTRACT_ADDRESS OUTBOX_CONTRACT_ADDRESS; do +for KEY in ROLLUP_CONTRACT_ADDRESS REGISTRY_CONTRACT_ADDRESS INBOX_CONTRACT_ADDRESS OUTBOX_CONTRACT_ADDRESS CONTRACT_DEPLOYMENT_EMITTER_ADDRESS; do VALUE=$(jq -r .$KEY ./serve/contract_addresses.json) export TF_VAR_$KEY=$VALUE done # Write TF state variables deploy_terraform l1-contracts ./terraform + +# Tag the image as deployed.
+retry tag_remote_image $REPOSITORY cache-$CONTENT_HASH cache-$CONTENT_HASH-$DEPLOY_TAG-deployed diff --git a/l1-contracts/scripts/deploy_contracts.sh b/l1-contracts/scripts/deploy_contracts.sh index bdd4ebe8053..6223a0719a9 100755 --- a/l1-contracts/scripts/deploy_contracts.sh +++ b/l1-contracts/scripts/deploy_contracts.sh @@ -32,7 +32,7 @@ export REGISTRY_CONTRACT_ADDRESS=$(deploy_contract ./src/core/messagebridge/Regi export INBOX_CONTRACT_ADDRESS=$(deploy_contract ./src/core/messagebridge/Inbox.sol:Inbox "$REGISTRY_CONTRACT_ADDRESS" | extract_deployed_to) export OUTBOX_CONTRACT_ADDRESS=$(deploy_contract ./src/core/messagebridge/Outbox.sol:Outbox "$REGISTRY_CONTRACT_ADDRESS" | extract_deployed_to) export ROLLUP_CONTRACT_ADDRESS=$(deploy_contract ./src/core/Rollup.sol:Rollup "$REGISTRY_CONTRACT_ADDRESS" | extract_deployed_to) -export CONTRACT_DEPLOYMENT_EMITTER_ADDRESS=$(deploy_contract ./src/core/periphery/ContractDeploymentEmitter.sol:ContractDeploymentEmitter | extract_deployed_to) +export CONTRACT_DEPLOYMENT_EMITTER_ADDRESS=$(deploy_contract ./src/periphery/ContractDeploymentEmitter.sol:ContractDeploymentEmitter | extract_deployed_to) # Store contract addresses in a JSON file jq -n \ diff --git a/l1-contracts/src/core/libraries/ConstantsGen.sol b/l1-contracts/src/core/libraries/ConstantsGen.sol index 436e18e84b2..e033fc779a5 100644 --- a/l1-contracts/src/core/libraries/ConstantsGen.sol +++ b/l1-contracts/src/core/libraries/ConstantsGen.sol @@ -56,13 +56,15 @@ library Constants { uint256 internal constant NOTE_HASH_SUBTREE_HEIGHT = 7; uint256 internal constant NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH = 25; uint256 internal constant NULLIFIER_SUBTREE_HEIGHT = 7; - uint256 internal constant BLOCKS_TREE_HEIGHT = 16; + uint256 internal constant ARCHIVE_HEIGHT = 16; uint256 internal constant NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH = 13; uint256 internal constant L1_TO_L2_MSG_SUBTREE_HEIGHT = 4; uint256 internal constant L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH = 12; uint256 internal constant FUNCTION_SELECTOR_NUM_BYTES = 4; uint256 internal constant MAPPING_SLOT_PEDERSEN_SEPARATOR = 4; uint256 internal constant NUM_FIELDS_PER_SHA256 = 2; + uint256 internal constant ARGS_HASH_CHUNK_LENGTH = 32; + uint256 internal constant ARGS_HASH_CHUNK_COUNT = 16; uint256 internal constant L1_TO_L2_MESSAGE_LENGTH = 8; uint256 internal constant L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH = 26; uint256 internal constant MAX_NOTE_FIELDS_LENGTH = 20; diff --git a/l1-contracts/src/core/libraries/Decoder.sol b/l1-contracts/src/core/libraries/Decoder.sol index 110522190b1..c1c0e4a828e 100644 --- a/l1-contracts/src/core/libraries/Decoder.sol +++ b/l1-contracts/src/core/libraries/Decoder.sol @@ -35,8 +35,8 @@ import {Hash} from "./Hash.sol"; * | 0x00ec | 0x20 | startPublicDataTreeRoot * | 0x010c | 0x20 | startL1ToL2MessageTreeSnapshot.root * | 0x012c | 0x04 | startL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 0x0130 | 0x20 | startBlocksTreeSnapshot.root - * | 0x0150 | 0x04 | startBlocksTreeSnapshot.nextAvailableLeafIndex + * | 0x0130 | 0x20 | startArchiveSnapshot.root + * | 0x0150 | 0x04 | startArchiveSnapshot.nextAvailableLeafIndex * | 0x0154 | 0x20 | endNoteHashTreeSnapshot.root * | 0x0174 | 0x04 | endNoteHashTreeSnapshot.nextAvailableLeafIndex * | 0x0178 | 0x20 | endNullifierTreeSnapshot.root @@ -46,8 +46,8 @@ import {Hash} from "./Hash.sol"; * | 0x01c0 | 0x20 | endPublicDataTreeRoot * | 0x01e0 | 0x20 | endL1ToL2MessageTreeSnapshot.root * | 0x0200 | 0x04 | endL1ToL2MessageTreeSnapshot.nextAvailableLeafIndex - * | 
0x0204 | 0x20 | endBlocksTreeSnapshot.root - * | 0x0224 | 0x04 | endBlocksTreeSnapshot.nextAvailableLeafIndex + * | 0x0204 | 0x20 | endArchiveSnapshot.root + * | 0x0224 | 0x04 | endArchiveSnapshot.nextAvailableLeafIndex * | 0x0228 | 0x04 | len(newCommitments) (denoted a) * | 0x022c | a * 0x20 | newCommitments * | 0x022c + a * 0x20 | 0x04 | len(newNullifiers) (denoted b) diff --git a/yarn-project/.dockerignore b/yarn-project/.dockerignore index 20f1f5070d4..f913d69056b 100644 --- a/yarn-project/.dockerignore +++ b/yarn-project/.dockerignore @@ -7,11 +7,10 @@ */data* **/dest -**/*.tsbuildinfo -**/Dockerfile* **/node_modules -Dockerfile +**/Dockerfile* +**/*.tsbuildinfo noir-contracts/src/types noir-contracts/src/artifacts -noir-contracts/target \ No newline at end of file +noir-contracts/target diff --git a/yarn-project/.yarnrc.yml b/yarn-project/.yarnrc.yml index 843129bf681..3491bc0f4b5 100644 --- a/yarn-project/.yarnrc.yml +++ b/yarn-project/.yarnrc.yml @@ -5,3 +5,7 @@ plugins: spec: '@yarnpkg/plugin-workspace-tools' yarnPath: .yarn/releases/yarn-3.6.3.cjs + +logFilters: + - code: YN0013 + level: discard diff --git a/yarn-project/Dockerfile b/yarn-project/Dockerfile index 99f3e771900..257d93af911 100644 --- a/yarn-project/Dockerfile +++ b/yarn-project/Dockerfile @@ -11,10 +11,10 @@ RUN apk add bash perl # Copy in the entire workspace. COPY . . -RUN yarn workspace @aztec/foundation build && \ - yarn workspace @aztec/noir-compiler build && \ - yarn workspace @aztec/noir-contracts noir:build:all && \ - yarn workspace @aztec/noir-protocol-circuits noir:build && \ - yarn tsc -b +RUN yarn workspace @aztec/foundation build +RUN yarn workspace @aztec/noir-compiler build +RUN yarn workspace @aztec/noir-contracts noir:build:all +RUN yarn workspace @aztec/noir-protocol-circuits noir:build +RUN yarn tsc -b ENTRYPOINT ["yarn"] diff --git a/yarn-project/Dockerfile.prod b/yarn-project/Dockerfile.prod new file mode 100644 index 00000000000..95e4479137b --- /dev/null +++ b/yarn-project/Dockerfile.prod @@ -0,0 +1,22 @@ +# This productionifies the workspace, removing all developer dependencies and producing a final slim image from which +# we then generate downstream multiarch containers to execute the specific projects. +FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project AS yarn-project + +# Need new arch specific image. +FROM node:18.19.0-alpine AS builder +RUN apk add bash jq --no-cache +COPY --from=yarn-project /usr/src /usr/src +WORKDIR /usr/src/yarn-project +ARG COMMIT_TAG="" +RUN ./scripts/version_packages.sh +# Productionify. See comment in yarn-project-base/Dockerfile. +RUN yarn workspaces focus @aztec/cli @aztec/aztec-sandbox @aztec/aztec-faucet --production && \ + yarn cache clean && \ + rm -rf ./**/src + +# We no longer need nargo. +RUN rm -rf /usr/src/noir/target + +# Create fresh minimal size image. 
+FROM node:18.19.0-alpine +COPY --from=builder /usr/src /usr/src \ No newline at end of file diff --git a/yarn-project/acir-simulator/src/acvm/serialize.ts b/yarn-project/acir-simulator/src/acvm/serialize.ts index abf78f63c1b..afb88c62fc2 100644 --- a/yarn-project/acir-simulator/src/acvm/serialize.ts +++ b/yarn-project/acir-simulator/src/acvm/serialize.ts @@ -111,7 +111,7 @@ export function toACVMBlockHeader(blockHeader: BlockHeader): ACVMField[] { toACVMField(blockHeader.nullifierTreeRoot), toACVMField(blockHeader.contractTreeRoot), toACVMField(blockHeader.l1ToL2MessagesTreeRoot), - toACVMField(blockHeader.blocksTreeRoot), + toACVMField(blockHeader.archiveRoot), toACVMField(blockHeader.publicDataTreeRoot), toACVMField(blockHeader.globalVariablesHash), ]; diff --git a/yarn-project/acir-simulator/src/client/view_data_oracle.ts b/yarn-project/acir-simulator/src/client/view_data_oracle.ts index f744e54dd6c..8a1c2258737 100644 --- a/yarn-project/acir-simulator/src/client/view_data_oracle.ts +++ b/yarn-project/acir-simulator/src/client/view_data_oracle.ts @@ -106,7 +106,7 @@ export class ViewDataOracle extends TypedOracle { block.endNullifierTreeSnapshot.root, block.endContractTreeSnapshot.root, block.endL1ToL2MessagesTreeSnapshot.root, - block.endBlocksTreeSnapshot.root, + block.endArchiveSnapshot.root, new Fr(0), // TODO(#3441) privateKernelVkTreeRoot is not present in L2Block and it's not yet populated in noir block.endPublicDataTreeRoot, computeGlobalsHash(block.globalVariables), diff --git a/yarn-project/aztec-faucet/Dockerfile b/yarn-project/aztec-faucet/Dockerfile index d524e04c8ef..be0711dbb16 100644 --- a/yarn-project/aztec-faucet/Dockerfile +++ b/yarn-project/aztec-faucet/Dockerfile @@ -1,14 +1,4 @@ -FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project AS builder - +FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod WORKDIR /usr/src/yarn-project/aztec-faucet - -# Productionify. See comment in yarn-project-base/Dockerfile. -RUN yarn cache clean && yarn workspaces focus --production - -# Create final, minimal size image. -FROM node:18-alpine -COPY --from=builder /usr/src/ /usr/src/ -WORKDIR /usr/src/yarn-project/aztec-faucet -ENTRYPOINT ["yarn"] -CMD [ "start" ] +ENTRYPOINT ["yarn", "start"] EXPOSE 8080 diff --git a/yarn-project/aztec-faucet/terraform/main.tf b/yarn-project/aztec-faucet/terraform/main.tf index 65aa239d550..68b0bd5e291 100644 --- a/yarn-project/aztec-faucet/terraform/main.tf +++ b/yarn-project/aztec-faucet/terraform/main.tf @@ -34,6 +34,10 @@ data "terraform_remote_state" "aztec2_iac" { } } +locals { + api_prefix = var.API_PREFIX == "" ? 
"/${var.DEPLOY_TAG}/aztec-faucet" : "/${var.DEPLOY_TAG}/aztec-faucet/${var.API_PREFIX}" +} + resource "aws_cloudwatch_log_group" "aztec-faucet" { name = "/fargate/service/${var.DEPLOY_TAG}/aztec-faucet" @@ -114,7 +118,7 @@ resource "aws_ecs_task_definition" "aztec-faucet" { }, { "name": "API_PREFIX", - "value": "/${var.DEPLOY_TAG}/aztec-faucet/${var.API_PREFIX}" + "value": "${local.api_prefix}" }, { "name": "CHAIN_ID", @@ -188,7 +192,7 @@ resource "aws_alb_target_group" "aztec-faucet" { deregistration_delay = 5 health_check { - path = "/${var.DEPLOY_TAG}/aztec-faucet/${var.API_PREFIX}/status" + path = "${local.api_prefix}/status" matcher = "200" interval = 10 healthy_threshold = 2 @@ -203,7 +207,7 @@ resource "aws_alb_target_group" "aztec-faucet" { resource "aws_lb_listener_rule" "api-1" { listener_arn = data.terraform_remote_state.aztec2_iac.outputs.alb_listener_arn - priority = 500 + priority = 600 action { type = "forward" diff --git a/yarn-project/aztec-faucet/terraform/variables.tf b/yarn-project/aztec-faucet/terraform/variables.tf index ae556c6c842..42ed1838367 100644 --- a/yarn-project/aztec-faucet/terraform/variables.tf +++ b/yarn-project/aztec-faucet/terraform/variables.tf @@ -17,7 +17,8 @@ variable "API_PREFIX" { } variable "CHAIN_ID" { - type = string + type = string + default = 31337 } variable "FAUCET_PRIVATE_KEY" { diff --git a/yarn-project/aztec-node/src/aztec-node/db.ts b/yarn-project/aztec-node/src/aztec-node/db.ts index 35ebf1b86b6..9b5be428781 100644 --- a/yarn-project/aztec-node/src/aztec-node/db.ts +++ b/yarn-project/aztec-node/src/aztec-node/db.ts @@ -12,8 +12,8 @@ import { AztecNodeConfig } from './config.js'; export const createMemDown = () => (memdown as any)() as MemDown; export const createLevelDown = (path: string) => (leveldown as any)(path) as LevelDown; -const DB_SUBDIR = 'aztec-node'; -const WORLD_STATE_SUBDIR = 'aztec-world-state'; +const DB_SUBDIR = 'aztec-node-db'; +const WORLD_STATE_SUBDIR = 'aztec-world-state-db'; const NODE_METADATA_KEY = '@@aztec_node_metadata'; /** diff --git a/yarn-project/aztec-node/src/aztec-node/server.ts b/yarn-project/aztec-node/src/aztec-node/server.ts index 8ec02fe8668..18f3b5e4865 100644 --- a/yarn-project/aztec-node/src/aztec-node/server.ts +++ b/yarn-project/aztec-node/src/aztec-node/server.ts @@ -1,6 +1,6 @@ import { Archiver, LMDBArchiverStore } from '@aztec/archiver'; import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, BlockHeader, CONTRACT_TREE_HEIGHT, Fr, @@ -8,6 +8,7 @@ import { L1_TO_L2_MSG_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + NullifierLeafPreimage, PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/circuits.js'; import { computeGlobalsHash, computePublicDataTreeIndex } from '@aztec/circuits.js/abis'; @@ -391,12 +392,12 @@ export class AztecNodeService implements AztecNode { * @param leafIndex - Index of the leaf in the tree. * @returns The sibling path. 
*/ - public async getBlocksTreeSiblingPath( + public async getArchiveSiblingPath( blockNumber: number | 'latest', leafIndex: bigint, - ): Promise> { + ): Promise> { const committedDb = await this.#getWorldState(blockNumber); - return committedDb.getSiblingPath(MerkleTreeId.BLOCKS_TREE, leafIndex); + return committedDb.getSiblingPath(MerkleTreeId.ARCHIVE, leafIndex); } /** @@ -429,19 +430,19 @@ export class AztecNodeService implements AztecNode { return undefined; } - const leafDataPromise = db.getLeafData(MerkleTreeId.NULLIFIER_TREE, Number(index)); + const leafPreimagePromise = db.getLeafPreimage(MerkleTreeId.NULLIFIER_TREE, index); const siblingPathPromise = db.getSiblingPath( MerkleTreeId.NULLIFIER_TREE, BigInt(index), ); - const [leafData, siblingPath] = await Promise.all([leafDataPromise, siblingPathPromise]); + const [leafPreimage, siblingPath] = await Promise.all([leafPreimagePromise, siblingPathPromise]); - if (!leafData) { + if (!leafPreimage) { return undefined; } - return new NullifierMembershipWitness(BigInt(index), leafData, siblingPath); + return new NullifierMembershipWitness(BigInt(index), leafPreimage as NullifierLeafPreimage, siblingPath); } /** @@ -463,22 +464,21 @@ export class AztecNodeService implements AztecNode { nullifier: Fr, ): Promise { const committedDb = await this.#getWorldState(blockNumber); - const { index, alreadyPresent } = await committedDb.getPreviousValueIndex( - MerkleTreeId.NULLIFIER_TREE, - nullifier.toBigInt(), - ); + const findResult = await committedDb.getPreviousValueIndex(MerkleTreeId.NULLIFIER_TREE, nullifier.toBigInt()); + if (!findResult) { + return undefined; + } + const { index, alreadyPresent } = findResult; if (alreadyPresent) { this.log.warn(`Nullifier ${nullifier.toBigInt()} already exists in the tree`); } - const leafData = await committedDb.getLeafData(MerkleTreeId.NULLIFIER_TREE, index); - if (!leafData) { - return undefined; - } + const preimageData = (await committedDb.getLeafPreimage(MerkleTreeId.NULLIFIER_TREE, index))!; + const siblingPath = await committedDb.getSiblingPath( MerkleTreeId.NULLIFIER_TREE, BigInt(index), ); - return new NullifierMembershipWitness(BigInt(index), leafData, siblingPath); + return new NullifierMembershipWitness(BigInt(index), preimageData as NullifierLeafPreimage, siblingPath); } /** @@ -506,15 +506,14 @@ export class AztecNodeService implements AztecNode { const committedDb = await this.#getWorldState('latest'); const getTreeRoot = async (id: MerkleTreeId) => Fr.fromBuffer((await committedDb.getTreeInfo(id)).root); - const [noteHashTree, nullifierTree, contractTree, l1ToL2MessagesTree, blocksTree, publicDataTree] = - await Promise.all([ - getTreeRoot(MerkleTreeId.NOTE_HASH_TREE), - getTreeRoot(MerkleTreeId.NULLIFIER_TREE), - getTreeRoot(MerkleTreeId.CONTRACT_TREE), - getTreeRoot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE), - getTreeRoot(MerkleTreeId.BLOCKS_TREE), - getTreeRoot(MerkleTreeId.PUBLIC_DATA_TREE), - ]); + const [noteHashTree, nullifierTree, contractTree, l1ToL2MessagesTree, archive, publicDataTree] = await Promise.all([ + getTreeRoot(MerkleTreeId.NOTE_HASH_TREE), + getTreeRoot(MerkleTreeId.NULLIFIER_TREE), + getTreeRoot(MerkleTreeId.CONTRACT_TREE), + getTreeRoot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE), + getTreeRoot(MerkleTreeId.ARCHIVE), + getTreeRoot(MerkleTreeId.PUBLIC_DATA_TREE), + ]); return { [MerkleTreeId.CONTRACT_TREE]: contractTree, @@ -522,7 +521,7 @@ export class AztecNodeService implements AztecNode { [MerkleTreeId.NULLIFIER_TREE]: nullifierTree, [MerkleTreeId.PUBLIC_DATA_TREE]: 
publicDataTree, [MerkleTreeId.L1_TO_L2_MESSAGES_TREE]: l1ToL2MessagesTree, - [MerkleTreeId.BLOCKS_TREE]: blocksTree, + [MerkleTreeId.ARCHIVE]: archive, }; } @@ -539,7 +538,7 @@ export class AztecNodeService implements AztecNode { roots[MerkleTreeId.NULLIFIER_TREE], roots[MerkleTreeId.CONTRACT_TREE], roots[MerkleTreeId.L1_TO_L2_MESSAGES_TREE], - roots[MerkleTreeId.BLOCKS_TREE], + roots[MerkleTreeId.ARCHIVE], Fr.ZERO, roots[MerkleTreeId.PUBLIC_DATA_TREE], globalsHash, diff --git a/yarn-project/aztec-node/src/bin/index.ts b/yarn-project/aztec-node/src/bin/index.ts index fee7f7808dc..7d265c0f2f6 100644 --- a/yarn-project/aztec-node/src/bin/index.ts +++ b/yarn-project/aztec-node/src/bin/index.ts @@ -2,8 +2,6 @@ import { createDebugLogger } from '@aztec/foundation/log'; import http from 'http'; -import Koa from 'koa'; -import Router from 'koa-router'; import { AztecNodeConfig, AztecNodeService, createAztecNodeRpcServer, getConfigEnvVars } from '../index.js'; @@ -20,19 +18,6 @@ async function createAndDeployAztecNode() { return await AztecNodeService.createAndSync(aztecNodeConfig); } -/** - * Creates a router for helper API endpoints of the Private eXecution Environment (PXE). - * @param apiPrefix - The prefix to use for all api requests - * @returns - The router for handling status requests. - */ -export function createStatusRouter(apiPrefix: string) { - const router = new Router({ prefix: `${apiPrefix}` }); - router.get('/status', (ctx: Koa.Context) => { - ctx.status = 200; - }); - return router; -} - /** * Create and start a new Aztec Node HTTP Server */ @@ -52,9 +37,6 @@ async function main() { const rpcServer = createAztecNodeRpcServer(aztecNode); const app = rpcServer.getApp(API_PREFIX); - const apiRouter = createStatusRouter(API_PREFIX); - app.use(apiRouter.routes()); - app.use(apiRouter.allowedMethods()); const httpServer = http.createServer(app.callback()); httpServer.listen(+AZTEC_NODE_PORT); diff --git a/yarn-project/aztec-node/terraform/main.tf b/yarn-project/aztec-node/terraform/main.tf index 4970090b8b2..3ca143b2f33 100644 --- a/yarn-project/aztec-node/terraform/main.tf +++ b/yarn-project/aztec-node/terraform/main.tf @@ -1,9 +1,3 @@ -# Terraform to setup a prototype network of Aztec Nodes in AWS -# It sets up 2 full nodes with different ports/keys etc. -# Some duplication across the 2 defined services, could possibly -# be refactored to use modules as and when we build out infrastructure for real - - terraform { backend "s3" { bucket = "aztec-terraform" @@ -68,6 +62,7 @@ locals { "/dns4/${var.DEPLOY_TAG}-aztec-bootstrap-${i + 1}.local/tcp/${var.BOOTNODE_LISTEN_PORT + i}/p2p/${local.bootnode_ids[i]}" ] combined_bootnodes = join(",", local.bootnodes) + data_dir = "/usr/src/yarn-project/aztec-sandbox/data" } resource "aws_cloudwatch_log_group" "aztec-node-log-group" { @@ -107,9 +102,38 @@ resource "aws_service_discovery_service" "aztec-node" { } } +# Configure an EFS filesystem. 
+resource "aws_efs_file_system" "node_data_store" { + count = local.node_count + creation_token = "${var.DEPLOY_TAG}-node-${count.index + 1}-data" + throughput_mode = "provisioned" + provisioned_throughput_in_mibps = 20 + + tags = { + Name = "${var.DEPLOY_TAG}-node-data" + } + + lifecycle_policy { + transition_to_ia = "AFTER_14_DAYS" + } +} + +resource "aws_efs_mount_target" "private_az1" { + count = local.node_count + file_system_id = aws_efs_file_system.node_data_store[count.index].id + subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az1_private_id + security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] +} + +resource "aws_efs_mount_target" "private_az2" { + count = local.node_count + file_system_id = aws_efs_file_system.node_data_store[count.index].id + subnet_id = data.terraform_remote_state.setup_iac.outputs.subnet_az2_private_id + security_groups = [data.terraform_remote_state.setup_iac.outputs.security_group_private_id] +} + # Define task definitions for each node. resource "aws_ecs_task_definition" "aztec-node" { - # for_each = var.node_keys count = local.node_count family = "${var.DEPLOY_TAG}-aztec-node-${count.index + 1}" requires_compatibilities = ["FARGATE"] @@ -118,7 +142,15 @@ resource "aws_ecs_task_definition" "aztec-node" { memory = "4096" execution_role_arn = data.terraform_remote_state.setup_iac.outputs.ecs_task_execution_role_arn task_role_arn = data.terraform_remote_state.aztec2_iac.outputs.cloudwatch_logging_ecs_role_arn - container_definitions = < Self { - Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 } + Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 } } pub fn block_hash(self) -> Field { diff --git a/yarn-project/aztec-nr/aztec/src/address.nr b/yarn-project/aztec-nr/aztec/src/address.nr index f1f4224513c..d569f33eea5 100644 --- a/yarn-project/aztec-nr/aztec/src/address.nr +++ b/yarn-project/aztec-nr/aztec/src/address.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::GENERATOR_INDEX__CONTRACT_ADDRESS; +use dep::protocol_types::constants::GENERATOR_INDEX__CONTRACT_ADDRESS; use crate::hash::pedersen_hash; pub fn compute_address(pub_key_x: Field, pub_key_y: Field, partial_address: Field) -> Field { diff --git a/yarn-project/aztec-nr/aztec/src/context.nr b/yarn-project/aztec-nr/aztec/src/context.nr index 720ce9981fb..c7609942d4c 100644 --- a/yarn-project/aztec-nr/aztec/src/context.nr +++ b/yarn-project/aztec-nr/aztec/src/context.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::{ +use dep::protocol_types::constants::{ EMPTY_NULLIFIED_COMMITMENT, MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, @@ -295,7 +295,7 @@ impl PrivateContext { nullifier_tree_root : fields[146], contract_tree_root : fields[147], l1_to_l2_messages_tree_root : fields[148], - blocks_tree_root : fields[149], + archive_root : fields[149], public_data_tree_root: fields[150], global_variables_hash: fields[151], }, diff --git a/yarn-project/aztec-nr/aztec/src/hash.nr b/yarn-project/aztec-nr/aztec/src/hash.nr index f8d3abb51aa..eb9fb57aff0 100644 --- a/yarn-project/aztec-nr/aztec/src/hash.nr +++ b/yarn-project/aztec-nr/aztec/src/hash.nr @@ -1,5 +1,5 @@ use dep::std::hash::{pedersen_hash_with_separator, sha256}; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ 
GENERATOR_INDEX__SIGNATURE_PAYLOAD, GENERATOR_INDEX__L1_TO_L2_MESSAGE_SECRET, }; diff --git a/yarn-project/aztec-nr/aztec/src/lib.nr b/yarn-project/aztec-nr/aztec/src/lib.nr index d1080f9056f..99c087b0075 100644 --- a/yarn-project/aztec-nr/aztec/src/lib.nr +++ b/yarn-project/aztec-nr/aztec/src/lib.nr @@ -1,6 +1,5 @@ mod abi; mod address; -mod constants_gen; mod context; mod hash; mod log; diff --git a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr index 2e2a254577c..0be0fc4b3ac 100644 --- a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr +++ b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::{ +use dep::protocol_types::constants::{ L1_TO_L2_MESSAGE_LENGTH, GENERATOR_INDEX__NULLIFIER, GENERATOR_INDEX__L1_TO_L2_MESSAGE_SECRET, diff --git a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr index 396d758006a..fbf194d76d1 100644 --- a/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr +++ b/yarn-project/aztec-nr/aztec/src/messaging/l1_to_l2_message_getter_data.nr @@ -1,5 +1,5 @@ use crate::messaging::l1_to_l2_message::L1ToL2Message; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ L1_TO_L2_MSG_TREE_HEIGHT, L1_TO_L2_MESSAGE_LENGTH, }; diff --git a/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr b/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr index f44d8ab31ec..b36d7e3fc06 100644 --- a/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr +++ b/yarn-project/aztec-nr/aztec/src/note/lifecycle.nr @@ -9,7 +9,7 @@ use crate::note::{ utils::compute_inner_note_hash, }; use crate::oracle::notes::{notify_created_note, notify_nullified_note}; -use crate::constants_gen::EMPTY_NULLIFIED_COMMITMENT; +use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; pub fn create_note( context: &mut PrivateContext, diff --git a/yarn-project/aztec-nr/aztec/src/note/note_getter.nr b/yarn-project/aztec-nr/aztec/src/note/note_getter.nr index 1e5131a22ae..3827e4641e7 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_getter.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_getter.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ MAX_READ_REQUESTS_PER_CALL, GET_NOTE_ORACLE_RETURN_LENGTH, GET_NOTES_ORACLE_RETURN_LENGTH, diff --git a/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr b/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr index c43fd996850..67dabe8593d 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_getter_options.nr @@ -1,6 +1,6 @@ use dep::std::option::Option; use crate::types::vec::BoundedVec; -use crate::constants_gen::MAX_READ_REQUESTS_PER_CALL; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; struct Select { field_index: u8, diff --git a/yarn-project/aztec-nr/aztec/src/note/note_hash.nr b/yarn-project/aztec-nr/aztec/src/note/note_hash.nr index 4e3618fa650..dc2c6c2c8b3 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_hash.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_hash.nr @@ -1,5 +1,5 @@ use crate::hash::pedersen_hash; -use crate::constants_gen::{GENERATOR_INDEX__UNIQUE_COMMITMENT, GENERATOR_INDEX__SILOED_COMMITMENT}; +use dep::protocol_types::constants::{GENERATOR_INDEX__UNIQUE_COMMITMENT, 
GENERATOR_INDEX__SILOED_COMMITMENT}; pub fn compute_inner_hash(storage_slot: Field, note_hash: Field) -> Field { // TODO(#1205) Do we need a generator index here? diff --git a/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr b/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr index 709237c4437..15d445d2c02 100644 --- a/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr +++ b/yarn-project/aztec-nr/aztec/src/note/note_viewer_options.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::MAX_NOTES_PER_PAGE; +use dep::protocol_types::constants::MAX_NOTES_PER_PAGE; use crate::note::note_getter_options::{Select, Sort}; use crate::types::vec::BoundedVec; diff --git a/yarn-project/aztec-nr/aztec/src/note/utils.nr b/yarn-project/aztec-nr/aztec/src/note/utils.nr index cf40eaeba9b..685c565d538 100644 --- a/yarn-project/aztec-nr/aztec/src/note/utils.nr +++ b/yarn-project/aztec-nr/aztec/src/note/utils.nr @@ -1,5 +1,5 @@ +use dep::protocol_types::constants::GENERATOR_INDEX__OUTER_NULLIFIER; use crate::{ - constants_gen::GENERATOR_INDEX__OUTER_NULLIFIER, note::{ note_hash::{compute_inner_hash, compute_siloed_hash, compute_unique_hash}, note_header::NoteHeader, diff --git a/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr b/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr index a5ab1eb95f3..ac687b1d2a3 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/call_private_function.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::CALL_PRIVATE_FUNCTION_RETURN_SIZE; +use dep::protocol_types::constants::CALL_PRIVATE_FUNCTION_RETURN_SIZE; #[oracle(callPrivateFunction)] fn call_private_function_oracle(_contract_address: Field, _function_selector: Field, _args_hash: Field) -> [Field; CALL_PRIVATE_FUNCTION_RETURN_SIZE] {} diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr index 5f453221952..9d15d693393 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_block_header.nr @@ -1,10 +1,11 @@ use dep::std::merkle::compute_merkle_root; +use dep::protocol_types::constants::{ + BLOCK_HEADER_LENGTH, + ARCHIVE_HEIGHT, +}; + use crate::{ abi::BlockHeader, - constants_gen::{ - BLOCK_HEADER_LENGTH, - BLOCKS_TREE_HEIGHT, - }, context::PrivateContext, oracle::get_membership_witness::{ get_membership_witness, @@ -27,12 +28,18 @@ pub fn get_block_header(block_number: Field, context: PrivateContext) -> BlockHe // 2) Compute the block hash from the block header let block_hash = block_header.block_hash(); - // 3) Get the membership wintess of the block in the blocks tree - let blocks_tree_id = 5; // TODO(#3443) - let witness: MembershipWitness = get_membership_witness(block_number, blocks_tree_id, block_hash); + // 3) Get the membership witness of the block in the archive + let archive_id = 5; // TODO(#3443) + + // Using `block_number` here for path is incorrect and it will break if we pass in an incorrect block number on input. + // Instead here should be the block number corresponding to `context.block_header.blocks_tree_root` + // This is not currently available in private context. See issue #3564 + let path_block_number = block_number; + + let witness: MembershipWitness = get_membership_witness(path_block_number, archive_id, block_hash); - // 4) Check that the block is in the blocks tree (i.e. 
the witness is valid) - assert(context.block_header.blocks_tree_root == compute_merkle_root(block_hash, witness.index, witness.path), "Proving membership of a block in blocks tree failed"); + // 4) Check that the block is in the archive (i.e. the witness is valid) + assert(context.block_header.archive_root == compute_merkle_root(block_hash, witness.index, witness.path), "Proving membership of a block in archive failed"); // 5) Return the block header block_header diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr index 358511db402..27f6722ae1b 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_l1_to_l2_message.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH; +use dep::protocol_types::constants::L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH; // Checks if a msg is within the l1ToL2Msg tree #[oracle(getL1ToL2Message)] diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr index 5122e685d3e..1f43ae52f57 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_membership_witness.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::NOTE_HASH_TREE_HEIGHT; +use dep::protocol_types::constants::NOTE_HASH_TREE_HEIGHT; use crate::utils::arr_copy_slice; // Note: We have M here because we need to somehow set it when calling get_membership_witness function and one way to diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr index 64d073d42cb..2cc493d7966 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_nullifier_membership_witness.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::NULLIFIER_TREE_HEIGHT; +use dep::protocol_types::constants::NULLIFIER_TREE_HEIGHT; use crate::utils::arr_copy_slice; use crate::hash::pedersen_hash; diff --git a/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr b/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr index 076a4748d69..7fbe0936997 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/get_sibling_path.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::NOTE_HASH_TREE_HEIGHT; +use dep::protocol_types::constants::NOTE_HASH_TREE_HEIGHT; use crate::utils::arr_copy_slice; #[oracle(getSiblingPath)] diff --git a/yarn-project/aztec-nr/aztec/src/oracle/logs.nr b/yarn-project/aztec-nr/aztec/src/oracle/logs.nr index caab7b2a13c..6690243c9f5 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/logs.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/logs.nr @@ -1,5 +1,5 @@ use crate::types::point::Point; -use crate::constants_gen::NUM_FIELDS_PER_SHA256; +use dep::protocol_types::constants::NUM_FIELDS_PER_SHA256; // TODO: Should take encrypted data. 
#[oracle(emitEncryptedLog)] diff --git a/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr b/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr index cda65354018..e8190c49dd3 100644 --- a/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr +++ b/yarn-project/aztec-nr/aztec/src/oracle/public_call.nr @@ -1,4 +1,4 @@ -use crate::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; #[oracle(callPublicFunction)] fn call_public_function_oracle(_contract_address: Field, _function_selector: Field, _args_hash: Field) -> [Field; RETURN_VALUES_LENGTH] {} diff --git a/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr b/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr index 9742106c152..73fedb091ff 100644 --- a/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr +++ b/yarn-project/aztec-nr/aztec/src/private_call_stack_item.nr @@ -1,6 +1,6 @@ use crate::abi::FunctionData; use crate::abi::PrivateCircuitPublicInputs; -use crate::constants_gen::GENERATOR_INDEX__CALL_STACK_ITEM; +use dep::protocol_types::constants::GENERATOR_INDEX__CALL_STACK_ITEM; use crate::hash::pedersen_hash; struct PrivateCallStackItem { diff --git a/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr b/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr index 6c557b9aa57..734d7ea7ee4 100644 --- a/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr +++ b/yarn-project/aztec-nr/aztec/src/public_call_stack_item.nr @@ -6,7 +6,7 @@ use crate::{ FunctionData, }, }; -use crate::constants_gen::{ +use dep::protocol_types::constants::{ RETURN_VALUES_LENGTH, GENERATOR_INDEX__CALL_STACK_ITEM, }; diff --git a/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr b/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr index 1084fb5a865..48537a552b9 100644 --- a/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr +++ b/yarn-project/aztec-nr/aztec/src/state_vars/immutable_singleton.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::EMPTY_NULLIFIED_COMMITMENT; +use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; use crate::context::{PrivateContext, Context}; use crate::note::{ lifecycle::create_note, diff --git a/yarn-project/aztec-nr/aztec/src/state_vars/set.nr b/yarn-project/aztec-nr/aztec/src/state_vars/set.nr index 66a39382f47..1c9a317fb28 100644 --- a/yarn-project/aztec-nr/aztec/src/state_vars/set.nr +++ b/yarn-project/aztec-nr/aztec/src/state_vars/set.nr @@ -1,6 +1,6 @@ use dep::std::option::Option; use crate::abi::PublicContextInputs; -use crate::constants_gen::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; +use dep::protocol_types::constants::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; use crate::context::{PrivateContext, PublicContext, Context}; use crate::note::{ lifecycle::{create_note, create_note_hash_from_public, destroy_note}, diff --git a/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr b/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr index 9f64faf3c82..99ad829f1f5 100644 --- a/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr +++ b/yarn-project/aztec-nr/aztec/src/state_vars/singleton.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use crate::constants_gen::{EMPTY_NULLIFIED_COMMITMENT, GENERATOR_INDEX__INITIALIZATION_NULLIFIER}; +use dep::protocol_types::constants::{EMPTY_NULLIFIED_COMMITMENT, GENERATOR_INDEX__INITIALIZATION_NULLIFIER}; use crate::context::{PrivateContext, PublicContext, Context}; use crate::note::{ 
lifecycle::{create_note, destroy_note}, diff --git a/yarn-project/aztec-nr/value-note/Nargo.toml b/yarn-project/aztec-nr/value-note/Nargo.toml index b1f4a276738..7f87db94351 100644 --- a/yarn-project/aztec-nr/value-note/Nargo.toml +++ b/yarn-project/aztec-nr/value-note/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "lib" [dependencies] -aztec = { path = "../aztec" } \ No newline at end of file +aztec = { path = "../aztec" } +protocol_types = { path = "../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/aztec-nr/value-note/src/filter.nr b/yarn-project/aztec-nr/value-note/src/filter.nr index 8657358cf10..bd0d3025772 100644 --- a/yarn-project/aztec-nr/value-note/src/filter.nr +++ b/yarn-project/aztec-nr/value-note/src/filter.nr @@ -1,5 +1,5 @@ use dep::std::option::Option; -use dep::aztec::constants_gen::MAX_READ_REQUESTS_PER_CALL; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use crate::value_note::ValueNote; pub fn filter_notes_min_sum(notes: [Option; MAX_READ_REQUESTS_PER_CALL], min_sum: Field) -> [Option; MAX_READ_REQUESTS_PER_CALL] { diff --git a/yarn-project/aztec-sandbox/Dockerfile b/yarn-project/aztec-sandbox/Dockerfile index 4dfc92a9596..c61714b06a3 100644 --- a/yarn-project/aztec-sandbox/Dockerfile +++ b/yarn-project/aztec-sandbox/Dockerfile @@ -1,34 +1,4 @@ -FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project AS yarn-project +FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod -# Need new arch specific image. -FROM node:18-alpine as builder -COPY --from=yarn-project /usr/src /usr/src -ARG COMMIT_TAG="" - -# Update pxe version if COMMIT_TAG has been used. -WORKDIR /usr/src/yarn-project/pxe -RUN if [[ -n "${COMMIT_TAG}" ]]; then \ - jq --arg v ${COMMIT_TAG} '.version = $v' package.json > _temp && mv _temp package.json; \ - fi - -# Update sandbox version if COMMIT_TAG has been used. -WORKDIR /usr/src/yarn-project/aztec-sandbox -RUN if [[ -n "${COMMIT_TAG}" ]]; then \ - jq --arg v ${COMMIT_TAG} '.version = $v' package.json > _temp && mv _temp package.json; \ - fi - -# Productionify. See comment in yarn-project-base/Dockerfile. -RUN yarn workspaces focus --production && yarn cache clean && rm -rf ../**/src - -# Create final, arch specific, minimal size image. -FROM node:18-alpine -COPY --from=builder /usr/src/yarn-project /usr/src/yarn-project -COPY --from=builder /usr/src/barretenberg/ts/package /usr/src/barretenberg/ts/package -COPY --from=builder /usr/src/noir/packages /usr/src/noir/packages -# Just until weird source-resolver bug fixed. 
-COPY --from=yarn-project /usr/src/noir/compiler /usr/src/noir/compiler -WORKDIR /usr/src/yarn-project/aztec-sandbox -ENV NODE_OPTIONS=--preserve-symlinks -ENTRYPOINT ["yarn"] -CMD [ "start" ] +ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/aztec-sandbox/dest/bin/index.js"] EXPOSE 8079 8080 diff --git a/yarn-project/aztec-sandbox/src/bin/index.ts b/yarn-project/aztec-sandbox/src/bin/index.ts index 31dde1ce825..71cf982e046 100644 --- a/yarn-project/aztec-sandbox/src/bin/index.ts +++ b/yarn-project/aztec-sandbox/src/bin/index.ts @@ -2,13 +2,16 @@ import { createAztecNodeRpcServer, getConfigEnvVars as getNodeConfigEnvVars } from '@aztec/aztec-node'; import { AccountManager, createAztecNodeClient, deployInitialSandboxAccounts } from '@aztec/aztec.js'; import { NULL_KEY } from '@aztec/ethereum'; +import { init } from '@aztec/foundation/crypto'; +import { createStatusRouter } from '@aztec/foundation/json-rpc/server'; import { createDebugLogger } from '@aztec/foundation/log'; import { fileURLToPath } from '@aztec/foundation/url'; -import { NoirWasmVersion } from '@aztec/noir-compiler/versions'; +import { NoirCommit } from '@aztec/noir-compiler/versions'; import { BootstrapNode, getP2PConfigEnvVars } from '@aztec/p2p'; import { GrumpkinScalar, PXEService, createPXERpcServer } from '@aztec/pxe'; import { readFileSync } from 'fs'; +import http from 'http'; import { dirname, resolve } from 'path'; import { mnemonicToAccount } from 'viem/accounts'; @@ -34,6 +37,7 @@ const { MODE = 'sandbox', TEST_ACCOUNTS = 'true', DEPLOY_AZTEC_CONTRACTS = 'true', + API_PREFIX = '', } = process.env; const logger = createDebugLogger(`aztec:${MODE}`); @@ -76,15 +80,24 @@ async function main() { const mode = MODE as SandboxMode; - const createShutdown = (cb?: () => Promise<void>) => async () => { - logger.info('Shutting down...'); - if (cb) { - await cb(); - } - process.exit(0); + const installSignalHandlers = (cb?: () => Promise<void>) => { + const shutdown = async () => { + logger.info('Shutting down...'); + if (cb) { + await cb(); + } + process.exit(0); + }; + process.removeAllListeners('SIGINT'); + process.removeAllListeners('SIGTERM'); + process.once('SIGINT', shutdown); + process.once('SIGTERM', shutdown); }; - let shutdown: () => Promise<void>; + installSignalHandlers(); + + // Init crypto (bb.js).
+ await init(); const logStrings = []; @@ -97,12 +110,12 @@ async function main() { // Code path for starting Sandbox if (mode === SandboxMode.Sandbox) { - logger.info(`Setting up Aztec Sandbox v${version} (noir v${NoirWasmVersion}), please stand by...`); + logger.info(`Setting up Aztec Sandbox v${version} (noir ${NoirCommit}), please stand by...`); const { pxe, node, stop, accounts } = await createAndInitialiseSandbox(deployTestAccounts); // Create shutdown cleanup function - shutdown = createShutdown(stop); + installSignalHandlers(stop); // Start Node and PXE JSON-RPC servers startHttpRpcServer(node, createAztecNodeRpcServer, AZTEC_NODE_PORT); @@ -115,7 +128,7 @@ async function main() { const accountLogStrings = await createAccountLogs(accounts, pxe); logStrings.push(...accountLogStrings); } - logStrings.push(`Aztec Sandbox v${version} (noir v${NoirWasmVersion}) is now ready for use!`); + logStrings.push(`Aztec Sandbox v${version} (noir ${NoirCommit}) is now ready for use!`); } else if (mode === SandboxMode.Node) { // Code path for starting Node only const nodeConfig = getNodeConfigEnvVars(); @@ -131,13 +144,22 @@ async function main() { } const node = await createAztecNode(nodeConfig); - shutdown = createShutdown(node.stop); + installSignalHandlers(node.stop); + + const port = process.env.AZTEC_NODE_PORT || 8080; // Use standard 8080 when no PXE is running + const nodeRpcServer = createAztecNodeRpcServer(node); + const app = nodeRpcServer.getApp(); + + // Add a /status endpoint + const statusRouter = createStatusRouter(API_PREFIX); + app.use(statusRouter.routes()); + app.use(statusRouter.allowedMethods()); // Start Node JSON-RPC server - startHttpRpcServer(node, createAztecNodeRpcServer, 8080); // Use standard 8080 when no PXE is running - logStrings.push( - `Aztec Node v${version} (noir v${NoirWasmVersion}) is now ready for use in port ${AZTEC_NODE_PORT}!`, - ); + const httpServer = http.createServer(app.callback()); + httpServer.listen(port); + + logStrings.push(`Aztec Node v${version} (noir ${NoirCommit}) is now ready for use in port ${port}!`); } else if (mode === SandboxMode.PXE) { // Code path for starting PXE only @@ -145,7 +167,7 @@ async function main() { const node = createAztecNodeClient(AZTEC_NODE_URL); const pxe = await createAztecPXE(node); - shutdown = createShutdown(pxe.stop); + installSignalHandlers(pxe.stop); // Start PXE JSON-RPC server startHttpRpcServer(pxe, createPXERpcServer, PXE_PORT); @@ -157,24 +179,20 @@ async function main() { logStrings.push(...accountLogStrings); } - logStrings.push(`PXE v${version} (noir v${NoirWasmVersion}) is now ready for use in port ${PXE_PORT}!`); + logStrings.push(`PXE v${version} (noir ${NoirCommit}) is now ready for use in port ${PXE_PORT}!`); } else if (mode === SandboxMode.P2PBootstrap) { // Code path for starting a P2P bootstrap node const config = getP2PConfigEnvVars(); const bootstrapNode = new BootstrapNode(logger); await bootstrapNode.start(config); - shutdown = createShutdown(bootstrapNode.stop); + installSignalHandlers(bootstrapNode.stop); logStrings.push( `Bootstrap P2P node is now ready for use. 
Listening on: ${config.tcpListenIp}:${config.tcpListenPort}.`, ); - } else { - shutdown = createShutdown(); } // Log startup details logger.info(`${splash}\n${github}\n\n`.concat(...logStrings)); - process.once('SIGINT', shutdown); - process.once('SIGTERM', shutdown); } /** diff --git a/yarn-project/aztec.js/package.json b/yarn-project/aztec.js/package.json index 637c4dfed82..007f54fbb1b 100644 --- a/yarn-project/aztec.js/package.json +++ b/yarn-project/aztec.js/package.json @@ -4,9 +4,20 @@ "version": "0.1.0", "type": "module", "exports": { - "node": "./dest/index.js", - "import": "./dest/index.js", - "default": "./dest/main.js" + ".": { + "node": "./dest/index.js", + "import": "./dest/index.js", + "default": "./dest/main.js" + }, + "./interfaces/pxe": "./dest/api/interfaces/pxe.js", + "./abi": "./dest/api/abi.js", + "./aztec_address": "./dest/api/aztec_address.js", + "./eth_address": "./dest/api/eth_address.js", + "./ethereum": "./dest/api/ethereum.js", + "./fields": "./dest/api/fields.js", + "./init": "./dest/api/init.js", + "./log_id": "./dest/api/log_id.js", + "./tx_hash": "./dest/api/tx_hash.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/aztec.js/src/api/README.md b/yarn-project/aztec.js/src/api/README.md new file mode 100644 index 00000000000..473a5bcd826 --- /dev/null +++ b/yarn-project/aztec.js/src/api/README.md @@ -0,0 +1,7 @@ +# API + +This provides a more modular api for importing parts of the library as needed. +The root `index.js` just exposes everything, which can have consequences for startup times and optimizations. +Here we can gradually build up a much more granular api to allow importing precisely what's needed. +This should adopt the opposite philosophy to "export all my child exports". +Every file should (usually) export one thing, and the file/directory structure should be reflected in package.json exports. 
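To make the intent of this layout concrete, here is a sketch of what consumer code could look like against the `exports` map added above. The import specifiers come from that map; the particular classes and calls (`FunctionSelector.fromSignature`, `AztecAddress.random`, `new Fr(...)`) are illustrative assumptions rather than part of this diff:

```typescript
// Hypothetical consumer code. Each specifier resolves to a single file under
// dest/api/ via the package.json "exports" map, so bundlers can chunk and
// tree-shake these modules instead of pulling in the whole root index.js.
import { FunctionSelector } from '@aztec/aztec.js/abi';
import { AztecAddress } from '@aztec/aztec.js/aztec_address';
import { Fr } from '@aztec/aztec.js/fields';

const selector = FunctionSelector.fromSignature('transfer(field,field)');
const recipient = AztecAddress.random();
const amount = new Fr(100n);
console.log(`calling ${selector} on ${recipient} for ${amount}`);
```

By contrast, `import { ... } from '@aztec/aztec.js'` resolves to the root entry point and drags the whole library into the consumer's module graph.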
diff --git a/yarn-project/aztec.js/src/api/abi.ts b/yarn-project/aztec.js/src/api/abi.ts new file mode 100644 index 00000000000..d76502b881d --- /dev/null +++ b/yarn-project/aztec.js/src/api/abi.ts @@ -0,0 +1 @@ +export { ContractArtifact, FunctionArtifact, FunctionSelector } from '@aztec/foundation/abi'; diff --git a/yarn-project/aztec.js/src/api/aztec_address.ts b/yarn-project/aztec.js/src/api/aztec_address.ts new file mode 100644 index 00000000000..c6cece77dae --- /dev/null +++ b/yarn-project/aztec.js/src/api/aztec_address.ts @@ -0,0 +1 @@ +export { AztecAddress } from '@aztec/foundation/aztec-address'; diff --git a/yarn-project/aztec.js/src/api/eth_address.ts b/yarn-project/aztec.js/src/api/eth_address.ts new file mode 100644 index 00000000000..f07492245b8 --- /dev/null +++ b/yarn-project/aztec.js/src/api/eth_address.ts @@ -0,0 +1 @@ +export { EthAddress } from '@aztec/foundation/eth-address'; diff --git a/yarn-project/aztec.js/src/api/ethereum.ts b/yarn-project/aztec.js/src/api/ethereum.ts new file mode 100644 index 00000000000..5be2a7ac37d --- /dev/null +++ b/yarn-project/aztec.js/src/api/ethereum.ts @@ -0,0 +1,6 @@ +export { + deployL1Contract, + deployL1Contracts, + DeployL1Contracts, + L1ContractArtifactsForDeployment, +} from '@aztec/ethereum'; diff --git a/yarn-project/aztec.js/src/api/fields.ts b/yarn-project/aztec.js/src/api/fields.ts new file mode 100644 index 00000000000..6f3f255f748 --- /dev/null +++ b/yarn-project/aztec.js/src/api/fields.ts @@ -0,0 +1 @@ +export { Point, Fr, Fq, GrumpkinScalar } from '@aztec/foundation/fields'; diff --git a/yarn-project/aztec.js/src/api/init.ts b/yarn-project/aztec.js/src/api/init.ts new file mode 100644 index 00000000000..2b5203c9d0b --- /dev/null +++ b/yarn-project/aztec.js/src/api/init.ts @@ -0,0 +1 @@ +export { init as initAztecJs } from '@aztec/foundation/crypto'; diff --git a/yarn-project/aztec.js/src/api/interfaces/pxe.ts b/yarn-project/aztec.js/src/api/interfaces/pxe.ts new file mode 100644 index 00000000000..3dc49f26d71 --- /dev/null +++ b/yarn-project/aztec.js/src/api/interfaces/pxe.ts @@ -0,0 +1 @@ +export { PXE } from '@aztec/types/interfaces'; diff --git a/yarn-project/aztec.js/src/api/log_id.ts b/yarn-project/aztec.js/src/api/log_id.ts new file mode 100644 index 00000000000..6439b39f7d9 --- /dev/null +++ b/yarn-project/aztec.js/src/api/log_id.ts @@ -0,0 +1 @@ +export { LogId } from '@aztec/types/log_id'; diff --git a/yarn-project/aztec.js/src/api/tx_hash.ts b/yarn-project/aztec.js/src/api/tx_hash.ts new file mode 100644 index 00000000000..35ef7f0b387 --- /dev/null +++ b/yarn-project/aztec.js/src/api/tx_hash.ts @@ -0,0 +1 @@ +export { TxHash } from '@aztec/types/tx_hash'; diff --git a/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json b/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json index 491da2af12c..061ae56ba1d 100644 --- a/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json +++ b/yarn-project/aztec.js/src/artifacts/ecdsa_account_contract.json @@ -395,7 +395,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -662,7 +662,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } diff --git a/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json b/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json index f05dd9664f7..8a0d04a998d 100644 --- a/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json +++ b/yarn-project/aztec.js/src/artifacts/schnorr_account_contract.json 
@@ -383,7 +383,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -650,7 +650,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } diff --git a/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json b/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json index fb155f98ec1..5496e442409 100644 --- a/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json +++ b/yarn-project/aztec.js/src/artifacts/schnorr_single_key_account_contract.json @@ -318,7 +318,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } @@ -585,7 +585,7 @@ } }, { - "name": "blocks_tree_root", + "name": "archive_root", "type": { "kind": "field" } diff --git a/yarn-project/aztec.js/src/contract_deployer/index.ts b/yarn-project/aztec.js/src/contract_deployer/index.ts index ef3f20492c4..20add4a47c0 100644 --- a/yarn-project/aztec.js/src/contract_deployer/index.ts +++ b/yarn-project/aztec.js/src/contract_deployer/index.ts @@ -1,2 +1,3 @@ export * from './contract_deployer.js'; export * from './deploy_sent_tx.js'; +export * from './deploy_method.js'; diff --git a/yarn-project/aztec.js/src/index.ts b/yarn-project/aztec.js/src/index.ts index e9e42ae309b..38f1bc8970f 100644 --- a/yarn-project/aztec.js/src/index.ts +++ b/yarn-project/aztec.js/src/index.ts @@ -1,24 +1,81 @@ -export * from './contract/index.js'; -export * from './contract_deployer/index.js'; -export * from './utils/index.js'; -export * from './pxe_client.js'; -export * from './account/index.js'; -export * from './contract_deployer/deploy_method.js'; -export * from './sandbox/index.js'; -export * from './wallet/index.js'; +/** + * This is our public api. + * Do NOT "export * from ..." here. + * Everything here should be explicit, to ensure we can clearly see everything we're exposing to the world. + * If it's exposed, people will use it, and then we can't remove/change the api without breaking client code. + * At the time of writing we overexpose things that should only be internal. + * + * TODO: Review and narrow scope of public api. + * We should also consider exposing subsections of the api via package.json exports, like we do with foundation. + * This can allow consumers to import much smaller parts of the library to incur less overhead. + * It will also allow web bundlers to perform intelligent chunking of bundles etc.
+ * Some work has been done on this within the api folder, providing the alternative import style of e.g.: + * ```typescript + * import { TxHash } from '@aztec/aztec.js/tx_hash'; + * import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/aztec.js/abi'; + * import { AztecAddress } from '@aztec/aztec.js/aztec_address'; + * import { EthAddress } from '@aztec/aztec.js/eth_address'; + * ``` + */ +export { + WaitOpts, + ContractFunctionInteraction, + Contract, + ContractBase, + ContractMethod, + SentTx, + BatchCall, +} from './contract/index.js'; + +export { ContractDeployer, DeployMethod, DeploySentTx } from './contract_deployer/index.js'; + +export { + generatePublicKey, + FieldLike, + EthAddressLike, + computeMessageSecretHash, + CheatCodes, + AztecAddressLike, + isContractDeployed, + EthCheatCodes, + computeAuthWitMessageHash, +} from './utils/index.js'; + +export { createPXEClient } from './pxe_client.js'; + +export { + CompleteAddress, + getSchnorrAccount, + AccountContract, + AccountManager, + getUnsafeSchnorrAccount, + EcdsaAccountContract, + createAccounts, + SchnorrAccountContract, + SingleKeyAccountContract, + createAccount, + AuthWitnessProvider, + BaseAccountContract, +} from './account/index.js'; + +export { waitForSandbox, getSandboxAccountsWallets, deployInitialSandboxAccounts } from './sandbox/index.js'; + +export { AccountWalletWithPrivateKey, AccountWallet, Wallet, SignerlessWallet } from './wallet/index.js'; // TODO https://github.com/AztecProtocol/aztec-packages/issues/2632 --> FunctionSelector might not need to be exposed // here once the issue is resolved. export { AztecAddress, EthAddress, - Point, Fr, + Fq, FunctionSelector, GlobalVariables, GrumpkinScalar, + Point, getContractDeploymentInfo, } from '@aztec/circuits.js'; + export { Grumpkin, Schnorr } from '@aztec/circuits.js/barretenberg'; export { @@ -29,20 +86,21 @@ ExtendedContractData, ExtendedNote, FunctionCall, - INITIAL_L2_BLOCK_NUM, GrumpkinPrivateKey, + INITIAL_L2_BLOCK_NUM, L2Actor, L2Block, L2BlockL2Logs, LogFilter, + LogId, LogType, MerkleTreeId, NodeInfo, Note, + PXE, PackedArguments, PartialAddress, PublicKey, - PXE, SyncStatus, Tx, TxExecutionRequest, @@ -50,25 +108,31 @@ TxReceipt, TxStatus, UnencryptedL2Log, - emptyFunctionCall, createAztecNodeClient, + emptyFunctionCall, merkleTreeIds, mockTx, } from '@aztec/types'; -export { ContractArtifact } from '@aztec/foundation/abi'; +// TODO: These kinds of things have no place on our public api. +// External devs will almost certainly have their own methods of doing these things. +// If we want to use them in our own "aztec.js consuming code", import them from foundation as needed.
+export { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; +export { sha256, init } from '@aztec/foundation/crypto'; export { DebugLogger, createDebugLogger, onLog } from '@aztec/foundation/log'; -export { fileURLToPath } from '@aztec/foundation/url'; +export { retry, retryUntil } from '@aztec/foundation/retry'; export { sleep } from '@aztec/foundation/sleep'; export { elapsed } from '@aztec/foundation/timer'; -export { retry, retryUntil } from '@aztec/foundation/retry'; -export * from '@aztec/foundation/crypto'; +export { fileURLToPath } from '@aztec/foundation/url'; export { to2Fields, toBigInt } from '@aztec/foundation/serialize'; export { toBigIntBE } from '@aztec/foundation/bigint-buffer'; +export { makeFetch } from '@aztec/foundation/json-rpc/client'; export { - deployL1Contract, - deployL1Contracts, DeployL1Contracts, L1ContractArtifactsForDeployment, + deployL1Contract, + deployL1Contracts, } from '@aztec/ethereum'; + +export { FieldsOf } from '@aztec/foundation/types'; diff --git a/yarn-project/boxes/blank-react/package.json b/yarn-project/boxes/blank-react/package.json index d8d97c6ba6e..3174826d119 100644 --- a/yarn-project/boxes/blank-react/package.json +++ b/yarn-project/boxes/blank-react/package.json @@ -37,9 +37,6 @@ "dependencies": { "@aztec/aztec-ui": "^0.1.14", "@aztec/aztec.js": "workspace:^", - "@aztec/circuits.js": "workspace:^", - "@aztec/foundation": "workspace:^", - "@aztec/types": "workspace:^", "classnames": "^2.3.2", "formik": "^2.4.3", "node-sass": "^9.0.0", diff --git a/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx b/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx index 484a7b10207..66609bd2ab9 100644 --- a/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx +++ b/yarn-project/boxes/blank-react/src/app/components/contract_function_form.tsx @@ -3,8 +3,7 @@ import { callContractFunction, deployContract, viewContractFunction } from '../. 
import { convertArgs } from '../../scripts/util.js'; import styles from './contract_function_form.module.scss'; import { Button, Loader } from '@aztec/aztec-ui'; -import { AztecAddress, CompleteAddress, Fr } from '@aztec/aztec.js'; -import { ContractArtifact, FunctionArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, ContractArtifact, Fr, FunctionArtifact } from '@aztec/aztec.js'; import { useFormik } from 'formik'; import * as Yup from 'yup'; diff --git a/yarn-project/boxes/blank-react/src/app/contract.tsx b/yarn-project/boxes/blank-react/src/app/contract.tsx index 3f1bd526956..6dfbc219589 100644 --- a/yarn-project/boxes/blank-react/src/app/contract.tsx +++ b/yarn-project/boxes/blank-react/src/app/contract.tsx @@ -3,8 +3,7 @@ import { Copy } from './components/copy.js'; import { ContractFunctionForm, Popup } from './components/index.js'; import styles from './contract.module.scss'; import { Button, ButtonSize, ButtonTheme, Card, CardTheme, ImageButton, ImageButtonIcon } from '@aztec/aztec-ui'; -import { AztecAddress, CompleteAddress } from '@aztec/aztec.js'; -import { FunctionArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, FunctionArtifact } from '@aztec/aztec.js'; import { ReactNode, useState } from 'react'; const functionTypeSortOrder = { diff --git a/yarn-project/boxes/blank-react/src/artifacts/Blank.json b/yarn-project/boxes/blank-react/src/artifacts/Blank.json index cf389c951c9..2e06365e481 100644 --- a/yarn-project/boxes/blank-react/src/artifacts/Blank.json +++ b/yarn-project/boxes/blank-react/src/artifacts/Blank.json @@ -40,7 +40,7 @@ "path": "/mnt/user-data/jan/aztec-packages/yarn-project/boxes/blank-react/src/contracts/src/main.nr" }, "35": { - "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, 
self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n blocks_tree_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.blocks_tree_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n 
pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; 
CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash 
= 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", + "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n 
block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n archive_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.archive_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n 
fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n 
inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", "path": "/aztec/abi.nr" }, "36": { diff --git a/yarn-project/boxes/blank-react/src/config.ts b/yarn-project/boxes/blank-react/src/config.ts index f693a4c7fe9..6521f4df01c 100644 --- a/yarn-project/boxes/blank-react/src/config.ts +++ b/yarn-project/boxes/blank-react/src/config.ts @@ -1,6 +1,5 @@ import { BlankContractArtifact } from './artifacts/Blank.js'; -import { PXE, createPXEClient } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { ContractArtifact, PXE, createPXEClient } from '@aztec/aztec.js'; // update this if using a different contract diff --git a/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts 
b/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts
index 54e971ff68d..d80037eed84 100644
--- a/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts
+++ b/yarn-project/boxes/blank-react/src/scripts/call_contract_function.ts
@@ -1,7 +1,5 @@
 import { getWallet } from './util.js';
-import { AztecAddress, PXE, CompleteAddress, Contract, TxReceipt } from '@aztec/aztec.js';
-import { ContractArtifact } from '@aztec/foundation/abi';
-import { FieldsOf } from '@aztec/foundation/types';
+import { AztecAddress, CompleteAddress, Contract, ContractArtifact, FieldsOf, PXE, TxReceipt } from '@aztec/aztec.js';
 
 export async function callContractFunction(
   address: AztecAddress,
diff --git a/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts b/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts
index 08725e90dd6..8cde9eb5b9d 100644
--- a/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts
+++ b/yarn-project/boxes/blank-react/src/scripts/deploy_contract.ts
@@ -1,6 +1,4 @@
-import { AztecAddress, CompleteAddress, DeployMethod, Fr } from '@aztec/aztec.js';
-import { ContractArtifact } from '@aztec/foundation/abi';
-import { PXE } from '@aztec/types';
+import { AztecAddress, CompleteAddress, ContractArtifact, DeployMethod, Fr, PXE } from '@aztec/aztec.js';
 
 export async function deployContract(
   activeWallet: CompleteAddress,
diff --git a/yarn-project/boxes/blank-react/src/scripts/util.ts b/yarn-project/boxes/blank-react/src/scripts/util.ts
index bea2bcba47f..cde41403d55 100644
--- a/yarn-project/boxes/blank-react/src/scripts/util.ts
+++ b/yarn-project/boxes/blank-react/src/scripts/util.ts
@@ -1,6 +1,12 @@
-import { AccountWallet, Fr, getSandboxAccountsWallets } from '@aztec/aztec.js';
-import { FunctionArtifact, encodeArguments } from '@aztec/foundation/abi';
-import { PXE, CompleteAddress } from '@aztec/types';
+import {
+  AccountWallet,
+  CompleteAddress,
+  Fr,
+  FunctionArtifact,
+  PXE,
+  encodeArguments,
+  getSandboxAccountsWallets,
+} from '@aztec/aztec.js';
 
 export function convertArgs(functionAbi: FunctionArtifact, args: any): Fr[] {
   const untypedArgs = functionAbi.parameters.map(param => {
diff --git a/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts b/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts
index 8b3bfd8e901..beff0032c77 100644
--- a/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts
+++ b/yarn-project/boxes/blank-react/src/scripts/view_contract_function.ts
@@ -1,6 +1,5 @@
 import { getWallet } from './util.js';
-import { AztecAddress, PXE, CompleteAddress, Contract } from '@aztec/aztec.js';
-import { ContractArtifact } from '@aztec/foundation/abi';
+import { AztecAddress, CompleteAddress, Contract, ContractArtifact, PXE } from '@aztec/aztec.js';
 
 export async function viewContractFunction(
   address: AztecAddress,
diff --git a/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts b/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts
index 6f906bfe35c..62fc7b7d1da 100644
--- a/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts
+++ b/yarn-project/boxes/blank-react/src/tests/blank.contract.test.ts
@@ -9,10 +9,10 @@ import {
   PXE,
   TxStatus,
   Wallet,
+  createDebugLogger,
   createPXEClient,
   waitForSandbox,
 } from '@aztec/aztec.js';
-import { createDebugLogger } from '@aztec/foundation/log';
 
 const logger = createDebugLogger('aztec:http-pxe-client');
 
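Note: every hunk in these blank-react script files makes the same change: deep imports from @aztec/foundation/* and @aztec/types are collapsed into the single @aztec/aztec.js entry point, which now re-exports ContractArtifact, FunctionArtifact, FieldsOf, PXE, CompleteAddress, encodeArguments and createDebugLogger. A minimal TypeScript sketch of the pattern, using only symbols that appear in the hunks above (nothing else about the package surface is implied):

    // Before: box code reached into three workspace packages for its types.
    // import { AztecAddress, PXE } from '@aztec/aztec.js';
    // import { ContractArtifact } from '@aztec/foundation/abi';
    // import { FieldsOf } from '@aztec/foundation/types';

    // After: the aggregated entry point re-exports the same symbols.
    import { AztecAddress, ContractArtifact, FieldsOf, PXE } from '@aztec/aztec.js';

    // The types compose exactly as before; only the import path changed.
    type DeployedContract = { address: AztecAddress; artifact: ContractArtifact };

This is also why the tsconfig.json and package.json hunks that follow can drop the ../../foundation, ../../types and ../../circuits.js references: the boxes now build against aztec.js alone.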
diff --git a/yarn-project/boxes/blank-react/tsconfig.dest.json b/yarn-project/boxes/blank-react/tsconfig.dest.json
index 1b9c3a4a72e..97c9e8ef0da 100644
--- a/yarn-project/boxes/blank-react/tsconfig.dest.json
+++ b/yarn-project/boxes/blank-react/tsconfig.dest.json
@@ -1,5 +1,5 @@
 {
   "extends": ".",
-  "references": [{ "path": "../../aztec.js" }, { "path": "../../foundation" }, { "path": "../../types" }],
+  "references": [{ "path": "../../aztec.js" }],
   "exclude": ["src/**/*.test.ts"]
 }
diff --git a/yarn-project/boxes/blank-react/tsconfig.json b/yarn-project/boxes/blank-react/tsconfig.json
index 755ee612d17..831cd23a900 100644
--- a/yarn-project/boxes/blank-react/tsconfig.json
+++ b/yarn-project/boxes/blank-react/tsconfig.json
@@ -26,14 +26,5 @@
     {
       "path": "../../aztec.js"
     },
-    {
-      "path": "../../circuits.js"
-    },
-    {
-      "path": "../../foundation"
-    },
-    {
-      "path": "../../types"
-    }
   ]
 }
diff --git a/yarn-project/boxes/blank/package.json b/yarn-project/boxes/blank/package.json
index dd7928c6d36..12d5c45576d 100644
--- a/yarn-project/boxes/blank/package.json
+++ b/yarn-project/boxes/blank/package.json
@@ -37,8 +37,6 @@
   "dependencies": {
     "@aztec/aztec-ui": "^0.1.14",
     "@aztec/aztec.js": "workspace:^",
-    "@aztec/circuits.js": "workspace:^",
-    "@aztec/foundation": "workspace:^",
     "serve": "^14.2.1"
   },
   "devDependencies": {
diff --git a/yarn-project/boxes/blank/src/artifacts/Blank.json b/yarn-project/boxes/blank/src/artifacts/Blank.json
index 252ef64e52f..19f23ddde4a 100644
--- a/yarn-project/boxes/blank/src/artifacts/Blank.json
+++ b/yarn-project/boxes/blank/src/artifacts/Blank.json
@@ -91,7 +91,7 @@
       "path": "/mnt/user-data/jan/aztec-packages/yarn-project/boxes/blank/src/contracts/src/main.nr"
     },
     "35": {
-      "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData 
{\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n blocks_tree_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.blocks_tree_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; 
MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n 
pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; 
ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", + "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// 
docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n archive_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.archive_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n 
fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n 
inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", "path": "/aztec/abi.nr" }, "36": { diff --git a/yarn-project/boxes/blank/src/index.ts b/yarn-project/boxes/blank/src/index.ts index 518e2cfc75b..000a7322f22 100644 --- a/yarn-project/boxes/blank/src/index.ts +++ b/yarn-project/boxes/blank/src/index.ts @@ -5,15 +5,17 @@ import { AztecAddress, CompleteAddress, Contract, + ContractArtifact, DeployMethod, + FieldsOf, Fr, + FunctionArtifact, PXE, TxReceipt, createPXEClient, + encodeArguments, getSandboxAccountsWallets, } from '@aztec/aztec.js'; -import { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; -import { FieldsOf } from '@aztec/foundation/types'; // docs:end:imports diff --git a/yarn-project/boxes/blank/src/tests/blank.contract.test.ts b/yarn-project/boxes/blank/src/tests/blank.contract.test.ts index 7e076b22bda..6fb153315f0 100644 --- 
a/yarn-project/boxes/blank/src/tests/blank.contract.test.ts
+++ b/yarn-project/boxes/blank/src/tests/blank.contract.test.ts
@@ -9,10 +9,10 @@ import {
   PXE,
   TxStatus,
   Wallet,
+  createDebugLogger,
   createPXEClient,
   waitForSandbox,
 } from '@aztec/aztec.js';
-import { createDebugLogger } from '@aztec/foundation/log';
 
 const logger = createDebugLogger('aztec:blank-box-test');
 
diff --git a/yarn-project/boxes/blank/tsconfig.dest.json b/yarn-project/boxes/blank/tsconfig.dest.json
index 1b9c3a4a72e..97c9e8ef0da 100644
--- a/yarn-project/boxes/blank/tsconfig.dest.json
+++ b/yarn-project/boxes/blank/tsconfig.dest.json
@@ -1,5 +1,5 @@
 {
   "extends": ".",
-  "references": [{ "path": "../../aztec.js" }, { "path": "../../foundation" }, { "path": "../../types" }],
+  "references": [{ "path": "../../aztec.js" }],
   "exclude": ["src/**/*.test.ts"]
 }
diff --git a/yarn-project/boxes/blank/tsconfig.json b/yarn-project/boxes/blank/tsconfig.json
index 4973d658b21..831cd23a900 100644
--- a/yarn-project/boxes/blank/tsconfig.json
+++ b/yarn-project/boxes/blank/tsconfig.json
@@ -26,11 +26,5 @@
     {
       "path": "../../aztec.js"
     },
-    {
-      "path": "../../circuits.js"
-    },
-    {
-      "path": "../../foundation"
-    }
   ]
 }
diff --git a/yarn-project/boxes/token/package.json b/yarn-project/boxes/token/package.json
index d80c4536a6e..81ee3dce74a 100644
--- a/yarn-project/boxes/token/package.json
+++ b/yarn-project/boxes/token/package.json
@@ -37,9 +37,6 @@
   "dependencies": {
     "@aztec/aztec-ui": "^0.1.14",
     "@aztec/aztec.js": "workspace:^",
-    "@aztec/circuits.js": "workspace:^",
-    "@aztec/foundation": "workspace:^",
-    "@aztec/types": "workspace:^",
     "classnames": "^2.3.2",
     "formik": "^2.4.3",
     "node-sass": "^9.0.0",
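Note: the Token.json hunks below are the artifact-level echo of the rename already visible in the abi.nr sources above: the fifth field of HistoricBlockData changes its name from blocks_tree_root to archive_root, while its position in the serialized layout stays fixed. A hedged TypeScript mirror of that layout (abi.nr is authoritative; the camelCase names and the bigint representation here are illustrative assumptions):

    // Field order must match HistoricBlockData.serialize() in abi.nr, which in
    // turn must match the order in private_circuit_public_inputs.hpp.
    interface HistoricBlockData {
      noteHashTreeRoot: bigint;
      nullifierTreeRoot: bigint;
      contractTreeRoot: bigint;
      l1ToL2MessagesTreeRoot: bigint;
      archiveRoot: bigint; // renamed from blocksTreeRoot in this change
      publicDataTreeRoot: bigint;
      globalVariablesHash: bigint;
    }

    const serializeBlockData = (d: HistoricBlockData): bigint[] => [
      d.noteHashTreeRoot,
      d.nullifierTreeRoot,
      d.contractTreeRoot,
      d.l1ToL2MessagesTreeRoot,
      d.archiveRoot,
      d.publicDataTreeRoot,
      d.globalVariablesHash,
    ];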
diff --git a/yarn-project/boxes/token/src/app/components/contract_function_form.tsx b/yarn-project/boxes/token/src/app/components/contract_function_form.tsx
index 01abc409157..3f736a4e667 100644
--- a/yarn-project/boxes/token/src/app/components/contract_function_form.tsx
+++ b/yarn-project/boxes/token/src/app/components/contract_function_form.tsx
@@ -3,8 +3,7 @@ import { callContractFunction, deployContract, viewContractFunction } from '../.
 import { convertArgs } from '../../scripts/util.js';
 import styles from './contract_function_form.module.scss';
 import { Button, Loader } from '@aztec/aztec-ui';
-import { AztecAddress, CompleteAddress, Fr } from '@aztec/aztec.js';
-import { ContractArtifact, FunctionArtifact } from '@aztec/foundation/abi';
+import { AztecAddress, CompleteAddress, ContractArtifact, Fr, FunctionArtifact } from '@aztec/aztec.js';
 import { useFormik } from 'formik';
 import * as Yup from 'yup';
 
diff --git a/yarn-project/boxes/token/src/app/contract.tsx b/yarn-project/boxes/token/src/app/contract.tsx
index 3bf292a5b4b..f4a76c2bfbe 100644
--- a/yarn-project/boxes/token/src/app/contract.tsx
+++ b/yarn-project/boxes/token/src/app/contract.tsx
@@ -3,8 +3,7 @@ import { Copy } from './components/copy.js';
 import { ContractFunctionForm, Popup } from './components/index.js';
 import styles from './contract.module.scss';
 import { Button, ButtonSize, ButtonTheme, Card, CardTheme, ImageButton, ImageButtonIcon } from '@aztec/aztec-ui';
-import { AztecAddress, CompleteAddress } from '@aztec/aztec.js';
-import { FunctionArtifact } from '@aztec/foundation/abi';
+import { AztecAddress, CompleteAddress, FunctionArtifact } from '@aztec/aztec.js';
 import { ReactNode, useState } from 'react';
 
 const functionTypeSortOrder = {
diff --git a/yarn-project/boxes/token/src/artifacts/Token.json b/yarn-project/boxes/token/src/artifacts/Token.json
index 60623ca669c..65a08e6042d 100644
--- a/yarn-project/boxes/token/src/artifacts/Token.json
+++ b/yarn-project/boxes/token/src/artifacts/Token.json
@@ -245,7 +245,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -514,7 +514,7 @@
         }
       },
      {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -774,7 +774,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -1174,7 +1174,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -1547,7 +1547,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -1823,7 +1823,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -2132,7 +2132,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -2408,7 +2408,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -2698,7 +2698,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -3068,7 +3068,7 @@
         }
       },
       {
-        "name": "blocks_tree_root",
+        "name": "archive_root",
         "type": {
           "kind": "field"
         }
@@ -3204,7 +3204,7 @@
         "path": "std/option.nr"
       },
       "38": {
-        "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n 
GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n blocks_tree_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n 
[\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.blocks_tree_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, blocks_tree_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: [Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n 
fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n }\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n 
fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", + "source": "use crate::constants_gen::{\n RETURN_VALUES_LENGTH,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n GENERATOR_INDEX__FUNCTION_ARGS,\n HISTORIC_BLOCK_DATA_LENGTH,\n CONTRACT_DEPLOYMENT_DATA_LENGTH,\n CALL_CONTEXT_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH,\n CONTRACT_STORAGE_READ_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH,\n PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH,\n GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__FUNCTION_DATA,\n GENERATOR_INDEX__PUBLIC_DATA_READ,\n GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST,\n GENERATOR_INDEX__CALL_CONTEXT,\n GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS,\n GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA,\n};\n\nuse crate::oracle::debug_log;\nuse crate::types::vec::BoundedVec;\nuse crate::types::point::Point;\nuse crate::hash::pedersen_hash;\n\n// docs:start:private-global-variables\nstruct PrivateGlobalVariables {\n chain_id: Field,\n version: Field,\n}\n// docs:end:private-global-variables\n\nimpl PrivateGlobalVariables {\n fn serialize(self) -> [Field; 2] {\n [self.chain_id, self.version]\n }\n}\n\n// docs:start:public-global-variables\nstruct PublicGlobalVariables {\n chain_id: Field,\n version: Field,\n block_number: Field,\n timestamp: Field,\n}\n// docs:end:public-global-variables\n\nimpl PublicGlobalVariables {\n fn serialize(self) -> [Field; 4] {\n [self.chain_id, self.version, self.block_number, self.timestamp]\n }\n}\n\n// docs:start:contract-deployment-data\nstruct ContractDeploymentData {\n deployer_public_key: Point,\n constructor_vk_hash : Field,\n function_tree_root : Field,\n contract_address_salt : Field,\n portal_contract_address : 
Field,\n}\n// docs:end:contract-deployment-data\n\nimpl ContractDeploymentData {\n fn serialize(self) -> [Field; CONTRACT_DEPLOYMENT_DATA_LENGTH] {\n [\n self.deployer_public_key.x,\n self.deployer_public_key.y,\n self.constructor_vk_hash,\n self.function_tree_root,\n self.contract_address_salt,\n self.portal_contract_address,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA)\n }\n}\n\n// PrivateContextInputs are expected to be provided to each private function\n// docs:start:private-context-inputs\nstruct PrivateContextInputs {\n call_context : CallContext,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n private_global_variables: PrivateGlobalVariables,\n}\n// docs:end:private-context-inputs\n\n// PublicContextInputs are expected to be provided to each public function\n// docs:start:public-context-inputs\nstruct PublicContextInputs {\n call_context: CallContext,\n block_data: HistoricBlockData,\n\n public_global_variables: PublicGlobalVariables,\n}\n// docs:end:public-context-inputs\n\n// docs:start:call-context\nstruct CallContext {\n msg_sender : Field,\n storage_contract_address : Field,\n portal_contract_address : Field,\n function_selector: Field,\n\n is_delegate_call : bool,\n is_static_call : bool,\n is_contract_deployment: bool,\n}\n// docs:end:call-context\n\nimpl CallContext {\n fn serialize(self) -> [Field; CALL_CONTEXT_LENGTH] {\n [\n self.msg_sender,\n self.storage_contract_address,\n self.portal_contract_address,\n self.function_selector,\n self.is_delegate_call as Field,\n self.is_static_call as Field,\n self.is_contract_deployment as Field,\n ]\n }\n\n fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__CALL_CONTEXT)\n }\n}\n\n// docs:start:historic-block-data\nstruct HistoricBlockData {\n note_hash_tree_root : Field,\n nullifier_tree_root : Field,\n contract_tree_root : Field,\n l1_to_l2_messages_tree_root : Field,\n archive_root: Field,\n public_data_tree_root: Field,\n global_variables_hash: Field,\n}\n// docs:end:historic-block-data\n\nimpl HistoricBlockData {\n // NOTE: this order must match the order in `private_circuit_public_inputs.hpp`\n pub fn serialize(self) -> [Field; HISTORIC_BLOCK_DATA_LENGTH] {\n [\n self.note_hash_tree_root,\n self.nullifier_tree_root,\n self.contract_tree_root,\n self.l1_to_l2_messages_tree_root,\n self.archive_root,\n self.public_data_tree_root,\n self.global_variables_hash,\n ]\n }\n\n pub fn empty() -> Self {\n Self { note_hash_tree_root: 0, nullifier_tree_root: 0, contract_tree_root: 0, l1_to_l2_messages_tree_root: 0, archive_root: 0, public_data_tree_root: 0, global_variables_hash: 0 }\n }\n}\n\nstruct FunctionData {\n function_selector: Field,\n is_internal: bool,\n is_private: bool,\n is_constructor: bool,\n}\n\nimpl FunctionData {\n fn hash(self) -> Field {\n pedersen_hash([\n self.function_selector,\n self.is_internal as Field,\n self.is_private as Field,\n self.is_constructor as Field,\n ], GENERATOR_INDEX__FUNCTION_DATA)\n }\n}\n\nstruct PrivateCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n read_requests: [Field; crate::abi::MAX_READ_REQUESTS_PER_CALL],\n pending_read_requests: [Field; crate::abi::MAX_PENDING_READ_REQUESTS_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n nullified_commitments: [Field; MAX_NEW_NULLIFIERS_PER_CALL],\n private_call_stack: 
[Field; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_l2_to_l1_msgs: [Field; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n // Explore introducing a new type like uint256 (similar to Point), so it's more explicit that\n // we're talking about a single number backed by two field elements.\n encrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n encrypted_log_preimages_length: Field,\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n contract_deployment_data: ContractDeploymentData,\n chain_id: Field,\n version: Field,\n}\n\nimpl PrivateCircuitPublicInputs {\n fn hash(self) -> Field {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push(self.call_context.hash());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.nullified_commitments);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.contract_deployment_data.hash());\n fields.push(self.chain_id);\n fields.push(self.version);\n\n pedersen_hash(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS)\n }\n\n fn serialize(self) -> [Field; PRIVATE_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize());\n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n fields.push_array(self.read_requests);\n fields.push_array(self.pending_read_requests);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.private_call_stack);\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.encrypted_logs_hash);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.encrypted_log_preimages_length);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push_array(self.contract_deployment_data.serialize());\n fields.push(self.chain_id);\n fields.push(self.version);\n fields.storage\n }\n}\n\nstruct ContractStorageRead {\n storage_slot: Field,\n value: Field,\n}\n\nimpl ContractStorageRead {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_READ_LENGTH] {\n [self.storage_slot, self.value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_READ)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, value: 0 }\n }\n}\n\nstruct ContractStorageUpdateRequest {\n storage_slot: Field,\n old_value: Field,\n new_value: Field,\n}\n\nimpl ContractStorageUpdateRequest {\n pub fn serialize(self) -> [Field; CONTRACT_STORAGE_UPDATE_REQUEST_LENGTH] {\n [self.storage_slot, self.old_value, self.new_value]\n }\n\n pub fn hash(self) -> Field {\n pedersen_hash(self.serialize(), GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST)\n }\n\n pub fn empty() -> Self {\n Self { storage_slot: 0, old_value: 0, new_value: 0 }\n 
}\n}\n\n\nstruct PublicCircuitPublicInputs {\n call_context: CallContext,\n args_hash: Field,\n return_values: [Field; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead; MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [Field; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [Field; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [Field; crate::abi::MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs: [Field; crate::abi::MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash: [Field; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: Field,\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicCircuitPublicInputs {\n \n pub fn hash(self) -> Field {\n let mut inputs: BoundedVec = BoundedVec::new(0);\n inputs.push(self.call_context.hash());\n inputs.push(self.args_hash);\n inputs.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n inputs.push(self.contract_storage_update_requests[i].hash());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n inputs.push(self.contract_storage_read[i].hash());\n }\n inputs.push_array(self.public_call_stack);\n inputs.push_array(self.new_commitments);\n inputs.push_array(self.new_nullifiers);\n inputs.push_array(self.new_l2_to_l1_msgs);\n\n inputs.push_array(self.unencrypted_logs_hash);\n inputs.push(self.unencrypted_log_preimages_length);\n inputs.push_array(self.block_data.serialize());\n inputs.push(self.prover_address);\n\n pedersen_hash(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS)\n }\n\n pub fn serialize(self) -> [Field; PUBLIC_CIRCUIT_PUBLIC_INPUTS_LENGTH] {\n let mut fields: BoundedVec = BoundedVec::new(0); \n fields.push_array(self.call_context.serialize()); \n fields.push(self.args_hash);\n fields.push_array(self.return_values);\n for i in 0..MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL {\n fields.push_array(self.contract_storage_update_requests[i].serialize());\n }\n for i in 0..MAX_PUBLIC_DATA_READS_PER_CALL {\n fields.push_array(self.contract_storage_read[i].serialize());\n }\n fields.push_array(self.public_call_stack);\n fields.push_array(self.new_commitments);\n fields.push_array(self.new_nullifiers);\n fields.push_array(self.new_l2_to_l1_msgs);\n fields.push_array(self.unencrypted_logs_hash);\n fields.push(self.unencrypted_log_preimages_length);\n fields.push_array(self.block_data.serialize());\n fields.push(self.prover_address);\n fields.storage\n }\n}\n\nstruct Hasher {\n fields: [Field],\n}\n\nimpl Hasher {\n pub fn new()-> Self {\n Self { fields: [] }\n }\n\n pub fn add(&mut self, field: Field) {\n self.fields = self.fields.push_back(field);\n }\n\n pub fn add_multiple(&mut self, fields: [Field; N]) {\n for i in 0..N {\n self.fields = self.fields.push_back(fields[i]);\n }\n }\n\n pub fn hash(self) -> Field {\n hash_args(self.fields)\n }\n}\n\nglobal ARGS_HASH_CHUNK_LENGTH: u32 = 32;\nglobal ARGS_HASH_CHUNK_COUNT: u32 = 16;\n\npub fn hash_args(args: [Field; N]) -> Field {\n if args.len() == 0 {\n 0\n } else {\n let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT];\n for i in 0..ARGS_HASH_CHUNK_COUNT {\n let mut chunk_hash = 0;\n let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH;\n if start_chunk_index < (args.len() as u32) {\n let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH];\n for j in 0..ARGS_HASH_CHUNK_LENGTH {\n let item_index = i * ARGS_HASH_CHUNK_LENGTH + j;\n if item_index < (args.len() as u32) {\n 
chunk_args[j] = args[item_index];\n }\n }\n chunk_hash = pedersen_hash(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS);\n }\n chunks_hashes[i] = chunk_hash;\n }\n pedersen_hash(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS)\n }\n}\n", "path": "/aztec/abi.nr" }, "39": { @@ -3212,7 +3212,7 @@ "path": "/aztec/address.nr" }, "41": { - "source": "use crate::constants_gen::{\n EMPTY_NULLIFIED_COMMITMENT,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n RETURN_VALUES_LENGTH,\n};\n\nuse crate::abi;\n\nuse crate::abi::{\n hash_args,\n CallContext,\n ContractDeploymentData,\n HistoricBlockData,\n FunctionData,\n PrivateCircuitPublicInputs,\n PublicCircuitPublicInputs,\n};\n\n// TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n// use dep::std::collections::vec::Vec;\n\n// l1 to l2 messaging\nuse crate::messaging::process_l1_to_l2_message;\nuse crate::private_call_stack_item::PrivateCallStackItem;\nuse crate::public_call_stack_item::PublicCallStackItem;\n\nuse crate::types::{\n vec::BoundedVec,\n point::Point,\n};\n\nuse crate::utils::arr_copy_slice;\n\nuse crate::oracle::{\n arguments,\n call_private_function::call_private_function_internal,\n public_call::call_public_function_internal,\n enqueue_public_function_call::enqueue_public_function_call_internal,\n context::get_portal_address,\n};\n\nuse dep::std::option::Option;\n\n// When finished, one can call .finish() to convert back to the abi\nstruct PrivateContext {\n // docs:start:private-context\n inputs: abi::PrivateContextInputs,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n read_requests: BoundedVec,\n pending_read_requests: BoundedVec,\n\n new_commitments: BoundedVec,\n new_nullifiers: BoundedVec,\n nullified_commitments: BoundedVec,\n\n private_call_stack : BoundedVec,\n public_call_stack : BoundedVec,\n new_l2_to_l1_msgs : BoundedVec,\n // docs:end:private-context\n\n block_data: HistoricBlockData,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec,\n // unencrypted_logs_preimages: Vec,\n}\n\nimpl PrivateContext {\n pub fn new(inputs: abi::PrivateContextInputs, args_hash: Field) -> PrivateContext {\n PrivateContext {\n inputs: inputs,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n read_requests: BoundedVec::new(0),\n pending_read_requests: BoundedVec::new(0),\n\n new_commitments: BoundedVec::new(0),\n new_nullifiers: BoundedVec::new(0),\n nullified_commitments: BoundedVec::new(0),\n\n block_data: inputs.block_data,\n\n private_call_stack: BoundedVec::new(0),\n public_call_stack: BoundedVec::new(0),\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> Field {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> Field {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> Field {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.private_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n 
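For reference, the `hash_args` routine shown above (in `/aztec/abi.nr`) hashes arguments in fixed-size chunks: the args are split into `ARGS_HASH_CHUNK_COUNT = 16` chunks of `ARGS_HASH_CHUNK_LENGTH = 32` fields (so at most 512 argument fields), each non-empty chunk is Pedersen-hashed, empty chunks contribute a literal zero, and the final args hash is the hash of the 16 chunk values. A TypeScript sketch of the same scheme; `pedersenHash` below is a dummy stand-in so the sketch runs, and the generator index value is assumed for illustration only:

```typescript
const ARGS_HASH_CHUNK_LENGTH = 32;
const ARGS_HASH_CHUNK_COUNT = 16; // caps the scheme at 16 * 32 = 512 argument fields

const FIELD_MODULUS = 2n ** 254n; // illustrative only, not the real curve modulus
const GENERATOR_INDEX__FUNCTION_ARGS = 5; // assumed value, for illustration

// NOT a real Pedersen hash: a placeholder mix so the example is executable.
function pedersenHash(inputs: bigint[], generatorIndex: number): bigint {
  return inputs.reduce((acc, x) => (acc * 31n + x + BigInt(generatorIndex)) % FIELD_MODULUS, 7n);
}

function hashArgs(args: bigint[]): bigint {
  if (args.length === 0) return 0n;
  const chunkHashes: bigint[] = [];
  for (let i = 0; i < ARGS_HASH_CHUNK_COUNT; i++) {
    const start = i * ARGS_HASH_CHUNK_LENGTH;
    if (start >= args.length) {
      chunkHashes.push(0n); // chunks past the end contribute a zero, not a hash of zeros
      continue;
    }
    // Zero-pad the last partial chunk to a full ARGS_HASH_CHUNK_LENGTH fields.
    const chunk = new Array<bigint>(ARGS_HASH_CHUNK_LENGTH).fill(0n);
    for (let j = 0; j < ARGS_HASH_CHUNK_LENGTH && start + j < args.length; j++) {
      chunk[j] = args[start + j];
    }
    chunkHashes.push(pedersenHash(chunk, GENERATOR_INDEX__FUNCTION_ARGS));
  }
  // The args hash is the hash of the 16 chunk values, with the same generator index.
  return pedersenHash(chunkHashes, GENERATOR_INDEX__FUNCTION_ARGS);
}

console.log(hashArgs([1n, 2n, 3n])); // short arg lists hash chunk 0 plus 15 zeros
```

This two-level structure keeps every Pedersen call at a bounded width while still committing to arbitrary-length argument lists up to the cap.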
self.inputs.private_global_variables.version\n }\n\n pub fn selector(self) -> Field {\n self.inputs.call_context.function_selector\n }\n\n pub fn finish(self) -> abi::PrivateCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let encrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let encrypted_log_preimages_length = 0;\n let unencrypted_log_preimages_length = 0;\n\n let priv_circuit_pub_inputs = abi::PrivateCircuitPublicInputs {\n call_context: self.inputs.call_context,\n args_hash: self.args_hash,\n return_values: self.return_values.storage,\n read_requests: self.read_requests.storage,\n pending_read_requests: self.pending_read_requests.storage,\n new_commitments: self.new_commitments.storage,\n new_nullifiers: self.new_nullifiers.storage,\n nullified_commitments: self.nullified_commitments.storage,\n private_call_stack: self.private_call_stack.storage,\n public_call_stack: self.public_call_stack.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n encrypted_logs_hash: encrypted_logs_hash,\n unencrypted_logs_hash: unencrypted_logs_hash,\n encrypted_log_preimages_length: encrypted_log_preimages_length,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_data: self.block_data,\n contract_deployment_data: self.inputs.contract_deployment_data,\n chain_id: self.inputs.private_global_variables.chain_id,\n version: self.inputs.private_global_variables.version,\n };\n priv_circuit_pub_inputs\n }\n\n pub fn push_read_request(&mut self, read_request: Field) {\n self.read_requests.push(read_request);\n }\n\n pub fn push_pending_read_request(&mut self, pending_read_request: Field) {\n self.pending_read_requests.push(pending_read_request);\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n self.new_commitments.push(note_hash);\n }\n\n // We never push a zero nullified_commitment as zero is used to indicate the end\n // of a field array in private kernel. 
This routine transparently replaces a\n // zero value into the special placeholder: EMPTY_NULLIFIED_COMMITMENT.\n pub fn push_new_nullifier(&mut self, nullifier: Field, nullified_commitment: Field) {\n self.new_nullifiers.push(nullifier);\n let mut non_zero_nullified = nullified_commitment;\n if (non_zero_nullified == 0) {\n non_zero_nullified = EMPTY_NULLIFIED_COMMITMENT;\n }\n self.nullified_commitments.push(non_zero_nullified);\n }\n\n // docs:start:context_message_portal\n pub fn message_portal(&mut self, content: Field) \n // docs:end:context_message_portal\n {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n // docs:start:context_consume_l1_to_l2_message\n // docs:start:consume_l1_to_l2_message\n pub fn consume_l1_to_l2_message(\n &mut self,\n msg_key: Field,\n content: Field,\n secret: Field\n ) \n // docs:end:context_consume_l1_to_l2_message\n {\n let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, self.this_address(), msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)\n }\n // docs:end:consume_l1_to_l2_message\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_private_function(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n args: [Field; ARGS_COUNT]\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_private_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_private_function_no_args(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n ) -> [Field; RETURN_VALUES_LENGTH] {\n self.call_private_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_private_function_with_packed_args(\n &mut self,\n contract_address: Field,\n function_selector: Field,\n args_hash: Field\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let fields = call_private_function_internal(\n contract_address, \n function_selector, \n args_hash\n );\n let item = PrivateCallStackItem {\n contract_address: fields[0],\n function_data: FunctionData {\n function_selector: fields[1],\n is_internal: fields[2] as bool,\n is_private: fields[3] as bool,\n is_constructor: fields[4] as bool,\n },\n public_inputs: PrivateCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : fields[5],\n storage_contract_address : fields[6],\n portal_contract_address : fields[7],\n function_selector: fields[8], // practically same as fields[1]\n is_delegate_call : fields[9] as bool,\n is_static_call : fields[10] as bool,\n is_contract_deployment: fields[11] as bool,\n },\n // TODO handle the offsets as a variable incremented during extraction?\n args_hash: fields[12],\n return_values: arr_copy_slice(fields, [0; RETURN_VALUES_LENGTH], 13),\n read_requests: arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 17),\n pending_read_requests: 
arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 49),\n new_commitments: arr_copy_slice(fields, [0; MAX_NEW_COMMITMENTS_PER_CALL], 81),\n new_nullifiers: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 97),\n nullified_commitments: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 113),\n private_call_stack: arr_copy_slice(fields, [0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], 129),\n public_call_stack: arr_copy_slice(fields, [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], 133),\n new_l2_to_l1_msgs: arr_copy_slice(fields, [0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL], 137),\n encrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 139),\n unencrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 141),\n encrypted_log_preimages_length: fields[143],\n unencrypted_log_preimages_length: fields[144],\n block_data: HistoricBlockData {\n // Must match order in `private_circuit_public_inputs.hpp`\n note_hash_tree_root : fields[145],\n nullifier_tree_root : fields[146],\n contract_tree_root : fields[147],\n l1_to_l2_messages_tree_root : fields[148],\n blocks_tree_root : fields[149],\n public_data_tree_root: fields[150],\n global_variables_hash: fields[151],\n },\n contract_deployment_data: ContractDeploymentData {\n deployer_public_key: Point::new(fields[152], fields[153]),\n constructor_vk_hash : fields[154],\n function_tree_root : fields[155],\n contract_address_salt : fields[156],\n portal_contract_address : fields[157],\n },\n chain_id: fields[158],\n version: fields[159],\n },\n is_execution_request: fields[160] as bool,\n };\n assert(contract_address == item.contract_address);\n assert(function_selector == item.function_data.function_selector);\n\n assert(args_hash == item.public_inputs.args_hash);\n\n assert(item.is_execution_request == false);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. 
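The `arr_copy_slice` extraction above (flagged by the `TODO handle the offsets` comment) uses hard-coded indices into the oracle's flat field array. Those indices are consistent with a running offset over the per-call array lengths; the sketch below reproduces them in TypeScript, with the lengths inferred from the offsets used in this file rather than quoted from `constants_gen`, so treat them as illustrative:

```typescript
// Lengths inferred from the offsets above (13, 17, 49, 81, ...); illustrative only.
const RETURN_VALUES_LENGTH = 4;
const MAX_READ_REQUESTS_PER_CALL = 32;
const MAX_NEW_COMMITMENTS_PER_CALL = 16;
const MAX_NEW_NULLIFIERS_PER_CALL = 16;
const MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL = 4;
const MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL = 4;
const MAX_NEW_L2_TO_L1_MSGS_PER_CALL = 2;
const NUM_FIELDS_PER_SHA256 = 2;

// fields[0..12] hold the header: contract address, function data, call context, args hash.
let offset = 13;
const layout: Record<string, number> = {};
for (const [name, len] of [
  ['return_values', RETURN_VALUES_LENGTH],
  ['read_requests', MAX_READ_REQUESTS_PER_CALL],
  ['pending_read_requests', MAX_READ_REQUESTS_PER_CALL],
  ['new_commitments', MAX_NEW_COMMITMENTS_PER_CALL],
  ['new_nullifiers', MAX_NEW_NULLIFIERS_PER_CALL],
  ['nullified_commitments', MAX_NEW_NULLIFIERS_PER_CALL],
  ['private_call_stack', MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL],
  ['public_call_stack', MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],
  ['new_l2_to_l1_msgs', MAX_NEW_L2_TO_L1_MSGS_PER_CALL],
  ['encrypted_logs_hash', NUM_FIELDS_PER_SHA256],
  ['unencrypted_logs_hash', NUM_FIELDS_PER_SHA256],
] as const) {
  layout[name] = offset; // start index of this array in the flat field vector
  offset += len;
}
// layout.read_requests === 17, layout.pending_read_requests === 49, etc., and the
// next scalars land at fields[143] and fields[144] (the two log preimage lengths).
console.log(layout, offset); // offset === 143
```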
We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);\n assert(item.public_inputs.call_context.storage_contract_address == contract_address);\n\n self.private_call_stack.push(item.hash());\n\n item.public_inputs.return_values\n }\n\n pub fn call_public_function(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n args: [Field; ARGS_COUNT]\n ) {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_public_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_public_function_no_args(\n &mut self,\n contract_address: Field, \n function_selector: Field,\n ) {\n self.call_public_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_public_function_with_packed_args(\n &mut self,\n contract_address: Field,\n function_selector: Field,\n args_hash: Field\n ) {\n let fields = enqueue_public_function_call_internal(\n contract_address, \n function_selector, \n args_hash\n );\n let item = PublicCallStackItem {\n contract_address: fields[0],\n function_data: FunctionData {\n function_selector: fields[1],\n is_internal: fields[2] as bool,\n is_private: fields[3] as bool,\n is_constructor: fields[4] as bool,\n },\n public_inputs: PublicCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : fields[5],\n storage_contract_address : fields[6],\n portal_contract_address : fields[7],\n function_selector: fields[8], // practically same as fields[1]\n is_delegate_call : fields[9] as bool,\n is_static_call : fields[10] as bool,\n is_contract_deployment: fields[11] as bool,\n },\n args_hash: fields[12],\n return_values: [0; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [0; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [0; MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs:[0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash:[0; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: 0,\n block_data: HistoricBlockData::empty(),\n prover_address: 0,\n },\n is_execution_request: true,\n };\n\n assert(contract_address == item.contract_address);\n assert(function_selector == item.function_data.function_selector);\n \n assert(args_hash == item.public_inputs.args_hash);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. 
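Both the private and the public call paths above end with the same defensive pattern: the call stack item reconstructed from untrusted oracle output is constrained to match the request before its hash is pushed onto the call stack. A TypeScript sketch of that check, with all names illustrative rather than the repo's actual API:

```typescript
// Illustrative model of the "reconstruct, then constrain" pattern: every field
// of the oracle-provided item that the circuit relies on must be checked.
interface CallContextLike {
  msgSender: bigint;
  storageContractAddress: bigint;
  isDelegateCall: boolean;
  isStaticCall: boolean;
  isContractDeployment: boolean;
}

function assertEnqueuedCallMatchesRequest(
  item: { contractAddress: bigint; argsHash: bigint; callContext: CallContextLike },
  requested: { contractAddress: bigint; argsHash: bigint; thisAddress: bigint },
): void {
  if (item.contractAddress !== requested.contractAddress) throw new Error('address mismatch');
  if (item.argsHash !== requested.argsHash) throw new Error('args hash mismatch');
  // A regular call: not delegate, static, or deployment.
  if (item.callContext.isDelegateCall || item.callContext.isStaticCall || item.callContext.isContractDeployment) {
    throw new Error('unexpected call type');
  }
  // The nested call must see the caller as msg_sender and execute at the callee's address.
  if (item.callContext.msgSender !== requested.thisAddress) throw new Error('msg_sender mismatch');
  if (item.callContext.storageContractAddress !== requested.contractAddress) {
    throw new Error('execution context address mismatch');
  }
}
```

In the circuits these checks are `assert`s over field values; the point of the pattern is that the oracle is free to return anything, so correctness comes entirely from the constraints applied afterwards.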
We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);\n assert(item.public_inputs.call_context.storage_contract_address == contract_address);\n\n self.public_call_stack.push(item.hash());\n }\n}\n\nuse crate::abi::{\n ContractStorageRead,\n ContractStorageUpdateRequest\n};\n\nstruct PublicContext {\n inputs: abi::PublicContextInputs,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n contract_storage_update_requests: BoundedVec,\n contract_storage_read: BoundedVec,\n public_call_stack: BoundedVec,\n\n new_commitments: BoundedVec,\n new_nullifiers: BoundedVec,\n\n new_l2_to_l1_msgs: BoundedVec,\n\n unencrypted_logs_hash: BoundedVec,\n unencrypted_logs_preimages_length: Field,\n\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicContext {\n pub fn new(inputs: abi::PublicContextInputs, args_hash: Field) -> PublicContext {\n let empty_storage_read = ContractStorageRead::empty();\n let empty_storage_update = ContractStorageUpdateRequest::empty();\n PublicContext {\n inputs: inputs,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n contract_storage_update_requests: BoundedVec::new(empty_storage_update),\n contract_storage_read: BoundedVec::new(empty_storage_read),\n public_call_stack: BoundedVec::new(0),\n\n new_commitments: BoundedVec::new(0),\n new_nullifiers: BoundedVec::new(0),\n\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n \n unencrypted_logs_hash: BoundedVec::new(0),\n unencrypted_logs_preimages_length: 0,\n\n block_data: inputs.block_data,\n prover_address: 0,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> Field {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> Field {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> Field {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.public_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n self.inputs.public_global_variables.version\n }\n\n pub fn selector(self) -> Field {\n self.inputs.call_context.function_selector\n }\n\n pub fn block_number(self) -> Field {\n self.inputs.public_global_variables.block_number\n }\n\n pub fn timestamp(self) -> Field {\n self.inputs.public_global_variables.timestamp\n }\n\n pub fn finish(self) -> abi::PublicCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_log_preimages_length = 0;\n\n\n // Compute the public call stack hashes\n let pub_circuit_pub_inputs = abi::PublicCircuitPublicInputs {\n call_context: self.inputs.call_context, // Done\n args_hash: self.args_hash, // Done\n contract_storage_update_requests: self.contract_storage_update_requests.storage,\n contract_storage_read: self.contract_storage_read.storage,\n return_values: self.return_values.storage,\n new_commitments: self.new_commitments.storage,\n 
new_nullifiers: self.new_nullifiers.storage,\n public_call_stack: self.public_call_stack.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n unencrypted_logs_hash: unencrypted_logs_hash,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_data: self.inputs.block_data,\n prover_address: self.prover_address,\n };\n pub_circuit_pub_inputs\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n self.new_commitments.push(note_hash);\n }\n\n pub fn push_new_nullifier(&mut self, nullifier: Field, _nullified_commitment: Field) {\n self.new_nullifiers.push(nullifier);\n }\n\n pub fn message_portal(&mut self, content: Field) {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n pub fn consume_l1_to_l2_message(&mut self, msg_key: Field, content: Field, secret: Field) {\n let this = (*self).this_address();\n let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, this, msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)\n }\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_public_function(\n _self: Self,\n contract_address: Field, \n function_selector: Field,\n args: [Field; ARGS_COUNT],\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = abi::hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n call_public_function_internal(\n contract_address, \n function_selector, \n args_hash,\n )\n }\n\n pub fn call_public_function_no_args(\n _self: Self,\n contract_address: Field, \n function_selector: Field,\n ) -> [Field; RETURN_VALUES_LENGTH] {\n call_public_function_internal(\n contract_address, \n function_selector, \n 0,\n )\n }\n\n}\n\nstruct Context {\n private: Option<&mut PrivateContext>,\n public: Option<&mut PublicContext>,\n}\n\nimpl Context {\n pub fn private(context: &mut PrivateContext) -> Context {\n Context {\n private: Option::some(context),\n public: Option::none()\n }\n }\n\n pub fn public(context: &mut PublicContext) -> Context {\n Context {\n public: Option::some(context),\n private: Option::none()\n }\n }\n\n pub fn none() -> Context {\n Context {\n public: Option::none(),\n private: Option::none()\n }\n }\n}", + "source": "use crate::constants_gen::{\n EMPTY_NULLIFIED_COMMITMENT,\n MAX_NEW_COMMITMENTS_PER_CALL,\n MAX_NEW_L2_TO_L1_MSGS_PER_CALL,\n MAX_NEW_NULLIFIERS_PER_CALL,\n MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL,\n MAX_PUBLIC_DATA_READS_PER_CALL,\n MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL,\n MAX_READ_REQUESTS_PER_CALL,\n MAX_PENDING_READ_REQUESTS_PER_CALL,\n NUM_FIELDS_PER_SHA256,\n RETURN_VALUES_LENGTH,\n};\n\nuse crate::abi;\n\nuse crate::abi::{\n hash_args,\n CallContext,\n ContractDeploymentData,\n HistoricBlockData,\n FunctionData,\n PrivateCircuitPublicInputs,\n PublicCircuitPublicInputs,\n};\n\n// TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n// use 
dep::std::collections::vec::Vec;\n\n// l1 to l2 messaging\nuse crate::messaging::process_l1_to_l2_message;\nuse crate::private_call_stack_item::PrivateCallStackItem;\nuse crate::public_call_stack_item::PublicCallStackItem;\n\nuse crate::types::{\n vec::BoundedVec,\n point::Point,\n};\n\nuse crate::utils::arr_copy_slice;\n\nuse crate::oracle::{\n arguments,\n call_private_function::call_private_function_internal,\n public_call::call_public_function_internal,\n enqueue_public_function_call::enqueue_public_function_call_internal,\n context::get_portal_address,\n};\n\nuse dep::std::option::Option;\n\n// When finished, one can call .finish() to convert back to the abi\nstruct PrivateContext {\n // docs:start:private-context\n inputs: abi::PrivateContextInputs,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n read_requests: BoundedVec,\n pending_read_requests: BoundedVec,\n\n new_commitments: BoundedVec,\n new_nullifiers: BoundedVec,\n nullified_commitments: BoundedVec,\n\n private_call_stack : BoundedVec,\n public_call_stack : BoundedVec,\n new_l2_to_l1_msgs : BoundedVec,\n // docs:end:private-context\n\n block_data: HistoricBlockData,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec,\n // unencrypted_logs_preimages: Vec,\n}\n\nimpl PrivateContext {\n pub fn new(inputs: abi::PrivateContextInputs, args_hash: Field) -> PrivateContext {\n PrivateContext {\n inputs: inputs,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n read_requests: BoundedVec::new(0),\n pending_read_requests: BoundedVec::new(0),\n\n new_commitments: BoundedVec::new(0),\n new_nullifiers: BoundedVec::new(0),\n nullified_commitments: BoundedVec::new(0),\n\n block_data: inputs.block_data,\n\n private_call_stack: BoundedVec::new(0),\n public_call_stack: BoundedVec::new(0),\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> Field {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> Field {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> Field {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.private_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n self.inputs.private_global_variables.version\n }\n\n pub fn selector(self) -> Field {\n self.inputs.call_context.function_selector\n }\n\n pub fn finish(self) -> abi::PrivateCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let encrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let encrypted_log_preimages_length = 0;\n let unencrypted_log_preimages_length = 0;\n\n let priv_circuit_pub_inputs = abi::PrivateCircuitPublicInputs {\n call_context: self.inputs.call_context,\n args_hash: self.args_hash,\n return_values: self.return_values.storage,\n read_requests: self.read_requests.storage,\n pending_read_requests: self.pending_read_requests.storage,\n new_commitments: self.new_commitments.storage,\n new_nullifiers: self.new_nullifiers.storage,\n nullified_commitments: self.nullified_commitments.storage,\n private_call_stack: self.private_call_stack.storage,\n public_call_stack: self.public_call_stack.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n 
encrypted_logs_hash: encrypted_logs_hash,\n unencrypted_logs_hash: unencrypted_logs_hash,\n encrypted_log_preimages_length: encrypted_log_preimages_length,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_data: self.block_data,\n contract_deployment_data: self.inputs.contract_deployment_data,\n chain_id: self.inputs.private_global_variables.chain_id,\n version: self.inputs.private_global_variables.version,\n };\n priv_circuit_pub_inputs\n }\n\n pub fn push_read_request(&mut self, read_request: Field) {\n self.read_requests.push(read_request);\n }\n\n pub fn push_pending_read_request(&mut self, pending_read_request: Field) {\n self.pending_read_requests.push(pending_read_request);\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n self.new_commitments.push(note_hash);\n }\n\n // We never push a zero nullified_commitment as zero is used to indicate the end\n // of a field array in private kernel. This routine transparently replaces a\n // zero value into the special placeholder: EMPTY_NULLIFIED_COMMITMENT.\n pub fn push_new_nullifier(&mut self, nullifier: Field, nullified_commitment: Field) {\n self.new_nullifiers.push(nullifier);\n let mut non_zero_nullified = nullified_commitment;\n if (non_zero_nullified == 0) {\n non_zero_nullified = EMPTY_NULLIFIED_COMMITMENT;\n }\n self.nullified_commitments.push(non_zero_nullified);\n }\n\n // docs:start:context_message_portal\n pub fn message_portal(&mut self, content: Field) \n // docs:end:context_message_portal\n {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n // docs:start:context_consume_l1_to_l2_message\n // docs:start:consume_l1_to_l2_message\n pub fn consume_l1_to_l2_message(\n &mut self,\n msg_key: Field,\n content: Field,\n secret: Field\n ) \n // docs:end:context_consume_l1_to_l2_message\n {\n let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, self.this_address(), msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)\n }\n // docs:end:consume_l1_to_l2_message\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self.inputs;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_private_function(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n args: [Field; ARGS_COUNT]\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_private_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_private_function_no_args(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n ) -> [Field; RETURN_VALUES_LENGTH] {\n self.call_private_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_private_function_with_packed_args(\n &mut self,\n contract_address: Field,\n function_selector: Field,\n args_hash: Field\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let fields = call_private_function_internal(\n 
contract_address, \n function_selector, \n args_hash\n );\n let item = PrivateCallStackItem {\n contract_address: fields[0],\n function_data: FunctionData {\n function_selector: fields[1],\n is_internal: fields[2] as bool,\n is_private: fields[3] as bool,\n is_constructor: fields[4] as bool,\n },\n public_inputs: PrivateCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : fields[5],\n storage_contract_address : fields[6],\n portal_contract_address : fields[7],\n function_selector: fields[8], // practically same as fields[1]\n is_delegate_call : fields[9] as bool,\n is_static_call : fields[10] as bool,\n is_contract_deployment: fields[11] as bool,\n },\n // TODO handle the offsets as a variable incremented during extraction?\n args_hash: fields[12],\n return_values: arr_copy_slice(fields, [0; RETURN_VALUES_LENGTH], 13),\n read_requests: arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 17),\n pending_read_requests: arr_copy_slice(fields, [0; MAX_READ_REQUESTS_PER_CALL], 49),\n new_commitments: arr_copy_slice(fields, [0; MAX_NEW_COMMITMENTS_PER_CALL], 81),\n new_nullifiers: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 97),\n nullified_commitments: arr_copy_slice(fields, [0; MAX_NEW_NULLIFIERS_PER_CALL], 113),\n private_call_stack: arr_copy_slice(fields, [0; MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL], 129),\n public_call_stack: arr_copy_slice(fields, [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL], 133),\n new_l2_to_l1_msgs: arr_copy_slice(fields, [0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL], 137),\n encrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 139),\n unencrypted_logs_hash: arr_copy_slice(fields, [0; NUM_FIELDS_PER_SHA256], 141),\n encrypted_log_preimages_length: fields[143],\n unencrypted_log_preimages_length: fields[144],\n block_data: HistoricBlockData {\n // Must match order in `private_circuit_public_inputs.hpp`\n note_hash_tree_root : fields[145],\n nullifier_tree_root : fields[146],\n contract_tree_root : fields[147],\n l1_to_l2_messages_tree_root : fields[148],\n archive_root : fields[149],\n public_data_tree_root: fields[150],\n global_variables_hash: fields[151],\n },\n contract_deployment_data: ContractDeploymentData {\n deployer_public_key: Point::new(fields[152], fields[153]),\n constructor_vk_hash : fields[154],\n function_tree_root : fields[155],\n contract_address_salt : fields[156],\n portal_contract_address : fields[157],\n },\n chain_id: fields[158],\n version: fields[159],\n },\n is_execution_request: fields[160] as bool,\n };\n assert(contract_address == item.contract_address);\n assert(function_selector == item.function_data.function_selector);\n\n assert(args_hash == item.public_inputs.args_hash);\n\n assert(item.is_execution_request == false);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. 
We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);\n assert(item.public_inputs.call_context.storage_contract_address == contract_address);\n\n self.private_call_stack.push(item.hash());\n\n item.public_inputs.return_values\n }\n\n pub fn call_public_function(\n &mut self,\n contract_address: Field, \n function_selector: Field, \n args: [Field; ARGS_COUNT]\n ) {\n let args_hash = hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n self.call_public_function_with_packed_args(contract_address, function_selector, args_hash)\n }\n\n pub fn call_public_function_no_args(\n &mut self,\n contract_address: Field, \n function_selector: Field,\n ) {\n self.call_public_function_with_packed_args(contract_address, function_selector, 0)\n }\n\n pub fn call_public_function_with_packed_args(\n &mut self,\n contract_address: Field,\n function_selector: Field,\n args_hash: Field\n ) {\n let fields = enqueue_public_function_call_internal(\n contract_address, \n function_selector, \n args_hash\n );\n let item = PublicCallStackItem {\n contract_address: fields[0],\n function_data: FunctionData {\n function_selector: fields[1],\n is_internal: fields[2] as bool,\n is_private: fields[3] as bool,\n is_constructor: fields[4] as bool,\n },\n public_inputs: PublicCircuitPublicInputs {\n call_context: CallContext {\n msg_sender : fields[5],\n storage_contract_address : fields[6],\n portal_contract_address : fields[7],\n function_selector: fields[8], // practically same as fields[1]\n is_delegate_call : fields[9] as bool,\n is_static_call : fields[10] as bool,\n is_contract_deployment: fields[11] as bool,\n },\n args_hash: fields[12],\n return_values: [0; RETURN_VALUES_LENGTH],\n contract_storage_update_requests: [ContractStorageUpdateRequest::empty(); MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL],\n contract_storage_read: [ContractStorageRead::empty(); MAX_PUBLIC_DATA_READS_PER_CALL],\n public_call_stack: [0; MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL],\n new_commitments: [0; MAX_NEW_COMMITMENTS_PER_CALL],\n new_nullifiers: [0; MAX_NEW_NULLIFIERS_PER_CALL],\n new_l2_to_l1_msgs:[0; MAX_NEW_L2_TO_L1_MSGS_PER_CALL],\n unencrypted_logs_hash:[0; NUM_FIELDS_PER_SHA256],\n unencrypted_log_preimages_length: 0,\n block_data: HistoricBlockData::empty(),\n prover_address: 0,\n },\n is_execution_request: true,\n };\n\n assert(contract_address == item.contract_address);\n assert(function_selector == item.function_data.function_selector);\n \n assert(args_hash == item.public_inputs.args_hash);\n\n // Assert that the call context of the enqueued call generated by the oracle matches our request.\n // We are issuing a regular call which is not delegate, static, or deployment. 
We also constrain\n // the msg_sender in the nested call to be equal to our address, and the execution context address\n // for the nested call to be equal to the address we actually called.\n assert(item.public_inputs.call_context.is_delegate_call == false);\n assert(item.public_inputs.call_context.is_static_call == false);\n assert(item.public_inputs.call_context.is_contract_deployment == false);\n assert(item.public_inputs.call_context.msg_sender == self.inputs.call_context.storage_contract_address);\n assert(item.public_inputs.call_context.storage_contract_address == contract_address);\n\n self.public_call_stack.push(item.hash());\n }\n}\n\nuse crate::abi::{\n ContractStorageRead,\n ContractStorageUpdateRequest\n};\n\nstruct PublicContext {\n inputs: abi::PublicContextInputs,\n\n args_hash : Field,\n return_values : BoundedVec,\n\n contract_storage_update_requests: BoundedVec,\n contract_storage_read: BoundedVec,\n public_call_stack: BoundedVec,\n\n new_commitments: BoundedVec,\n new_nullifiers: BoundedVec,\n\n new_l2_to_l1_msgs: BoundedVec,\n\n unencrypted_logs_hash: BoundedVec,\n unencrypted_logs_preimages_length: Field,\n\n block_data: HistoricBlockData,\n prover_address: Field,\n}\n\nimpl PublicContext {\n pub fn new(inputs: abi::PublicContextInputs, args_hash: Field) -> PublicContext {\n let empty_storage_read = ContractStorageRead::empty();\n let empty_storage_update = ContractStorageUpdateRequest::empty();\n PublicContext {\n inputs: inputs,\n\n args_hash: args_hash,\n return_values: BoundedVec::new(0),\n\n contract_storage_update_requests: BoundedVec::new(empty_storage_update),\n contract_storage_read: BoundedVec::new(empty_storage_read),\n public_call_stack: BoundedVec::new(0),\n\n new_commitments: BoundedVec::new(0),\n new_nullifiers: BoundedVec::new(0),\n\n new_l2_to_l1_msgs: BoundedVec::new(0),\n\n \n unencrypted_logs_hash: BoundedVec::new(0),\n unencrypted_logs_preimages_length: 0,\n\n block_data: inputs.block_data,\n prover_address: 0,\n\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n // encrypted_logs_preimages: Vec::new(),\n // unencrypted_logs_preimages: Vec::new(),\n }\n }\n\n pub fn msg_sender(self) -> Field {\n self.inputs.call_context.msg_sender\n }\n\n pub fn this_address(self) -> Field {\n self.inputs.call_context.storage_contract_address\n }\n\n pub fn this_portal_address(self) -> Field {\n self.inputs.call_context.portal_contract_address\n }\n\n pub fn chain_id(self) -> Field {\n self.inputs.public_global_variables.chain_id\n }\n\n pub fn version(self) -> Field {\n self.inputs.public_global_variables.version\n }\n\n pub fn selector(self) -> Field {\n self.inputs.call_context.function_selector\n }\n\n pub fn block_number(self) -> Field {\n self.inputs.public_global_variables.block_number\n }\n\n pub fn timestamp(self) -> Field {\n self.inputs.public_global_variables.timestamp\n }\n\n pub fn finish(self) -> abi::PublicCircuitPublicInputs {\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n let unencrypted_logs_hash = [0; NUM_FIELDS_PER_SHA256];\n let unencrypted_log_preimages_length = 0;\n\n\n // Compute the public call stack hashes\n let pub_circuit_pub_inputs = abi::PublicCircuitPublicInputs {\n call_context: self.inputs.call_context, // Done\n args_hash: self.args_hash, // Done\n contract_storage_update_requests: self.contract_storage_update_requests.storage,\n contract_storage_read: self.contract_storage_read.storage,\n return_values: self.return_values.storage,\n new_commitments: self.new_commitments.storage,\n 
new_nullifiers: self.new_nullifiers.storage,\n public_call_stack: self.public_call_stack.storage,\n new_l2_to_l1_msgs: self.new_l2_to_l1_msgs.storage,\n unencrypted_logs_hash: unencrypted_logs_hash,\n unencrypted_log_preimages_length: unencrypted_log_preimages_length,\n block_data: self.inputs.block_data,\n prover_address: self.prover_address,\n };\n pub_circuit_pub_inputs\n }\n\n pub fn push_new_note_hash(&mut self, note_hash: Field) {\n self.new_commitments.push(note_hash);\n }\n\n pub fn push_new_nullifier(&mut self, nullifier: Field, _nullified_commitment: Field) {\n self.new_nullifiers.push(nullifier);\n }\n\n pub fn message_portal(&mut self, content: Field) {\n self.new_l2_to_l1_msgs.push(content);\n }\n\n // PrivateContextInputs must be temporarily passed in to prevent too many unknowns\n // Note this returns self to get around an issue where mutable structs do not maintain mutations unless reassigned\n pub fn consume_l1_to_l2_message(&mut self, msg_key: Field, content: Field, secret: Field) {\n let this = (*self).this_address();\n let nullifier = process_l1_to_l2_message(self.block_data.l1_to_l2_messages_tree_root, this, msg_key, content, secret);\n\n // Push nullifier (and the \"commitment\" corresponding to this can be \"empty\")\n self.push_new_nullifier(nullifier, EMPTY_NULLIFIED_COMMITMENT)\n }\n\n pub fn accumulate_encrypted_logs(&mut self, log: [Field; N]) {\n let _void1 = self;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn accumulate_unencrypted_logs(&mut self, log: T) {\n let _void1 = self;\n let _void2 = log;\n // TODO(https://github.com/AztecProtocol/aztec-packages/issues/1165)\n }\n\n pub fn call_public_function(\n _self: Self,\n contract_address: Field, \n function_selector: Field,\n args: [Field; ARGS_COUNT],\n ) -> [Field; RETURN_VALUES_LENGTH] {\n let args_hash = abi::hash_args(args);\n assert(args_hash == arguments::pack_arguments(args));\n call_public_function_internal(\n contract_address, \n function_selector, \n args_hash,\n )\n }\n\n pub fn call_public_function_no_args(\n _self: Self,\n contract_address: Field, \n function_selector: Field,\n ) -> [Field; RETURN_VALUES_LENGTH] {\n call_public_function_internal(\n contract_address, \n function_selector, \n 0,\n )\n }\n\n}\n\nstruct Context {\n private: Option<&mut PrivateContext>,\n public: Option<&mut PublicContext>,\n}\n\nimpl Context {\n pub fn private(context: &mut PrivateContext) -> Context {\n Context {\n private: Option::some(context),\n public: Option::none()\n }\n }\n\n pub fn public(context: &mut PublicContext) -> Context {\n Context {\n public: Option::some(context),\n private: Option::none()\n }\n }\n\n pub fn none() -> Context {\n Context {\n public: Option::none(),\n private: Option::none()\n }\n }\n}", "path": "/aztec/context.nr" }, "42": { diff --git a/yarn-project/boxes/token/src/config.ts b/yarn-project/boxes/token/src/config.ts index 94a28ae43bd..86d549894a3 100644 --- a/yarn-project/boxes/token/src/config.ts +++ b/yarn-project/boxes/token/src/config.ts @@ -1,6 +1,5 @@ import { TokenContractArtifact } from './artifacts/Token.js'; -import { PXE, createPXEClient } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { ContractArtifact, PXE, createPXEClient } from '@aztec/aztec.js'; // update this if using a different contract diff --git a/yarn-project/boxes/token/src/contracts/Nargo.toml b/yarn-project/boxes/token/src/contracts/Nargo.toml index 469bf27dcc3..a69d8c878a0 100644 --- 
a/yarn-project/boxes/token/src/contracts/Nargo.toml +++ b/yarn-project/boxes/token/src/contracts/Nargo.toml @@ -8,4 +8,5 @@ type = "contract" aztec = { path = "../../../../aztec-nr/aztec" } value_note = { path = "../../../../aztec-nr/value-note"} safe_math = { path = "../../../../aztec-nr/safe-math" } -authwit = { path = "../../../../aztec-nr/authwit" } \ No newline at end of file +authwit = { path = "../../../../aztec-nr/authwit" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr b/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr index 37dbcd4ddbe..9be83b78710 100644 --- a/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr +++ b/yarn-project/boxes/token/src/contracts/src/types/balance_set.nr @@ -1,8 +1,8 @@ use dep::std::option::Option; use dep::safe_math::SafeU120; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ context::Context, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, types::address::AztecAddress, }; diff --git a/yarn-project/boxes/token/src/contracts/src/types/token_note.nr b/yarn-project/boxes/token/src/contracts/src/types/token_note.nr index fac2b5eeca3..a524d08c9e4 100644 --- a/yarn-project/boxes/token/src/contracts/src/types/token_note.nr +++ b/yarn-project/boxes/token/src/contracts/src/types/token_note.nr @@ -1,3 +1,4 @@ +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ note::{ note_header::NoteHeader, @@ -6,7 +7,6 @@ use dep::aztec::{ }, hash::pedersen_hash, context::PrivateContext, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, log::emit_encrypted_log, }; diff --git a/yarn-project/boxes/token/src/scripts/call_contract_function.ts b/yarn-project/boxes/token/src/scripts/call_contract_function.ts index 854803eaa42..b05492f065f 100644 --- a/yarn-project/boxes/token/src/scripts/call_contract_function.ts +++ b/yarn-project/boxes/token/src/scripts/call_contract_function.ts @@ -1,6 +1,5 @@ import { getWallet } from './util.js'; -import { AztecAddress, PXE, CompleteAddress, Contract } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, Contract, ContractArtifact, PXE } from '@aztec/aztec.js'; export async function callContractFunction( address: AztecAddress, diff --git a/yarn-project/boxes/token/src/scripts/deploy_contract.ts b/yarn-project/boxes/token/src/scripts/deploy_contract.ts index 84c28cf6318..272ebc637ed 100644 --- a/yarn-project/boxes/token/src/scripts/deploy_contract.ts +++ b/yarn-project/boxes/token/src/scripts/deploy_contract.ts @@ -1,6 +1,4 @@ -import { AztecAddress, CompleteAddress, DeployMethod, Fr } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; -import { PXE } from '@aztec/types'; +import { AztecAddress, CompleteAddress, ContractArtifact, DeployMethod, Fr, PXE } from '@aztec/aztec.js'; export async function deployContract( activeWallet: CompleteAddress, diff --git a/yarn-project/boxes/token/src/scripts/util.ts b/yarn-project/boxes/token/src/scripts/util.ts index 6b6b8884ca8..f2aa3609e15 100644 --- a/yarn-project/boxes/token/src/scripts/util.ts +++ b/yarn-project/boxes/token/src/scripts/util.ts @@ -1,6 +1,12 @@ -import { AccountWallet, Fr, getSandboxAccountsWallets } from '@aztec/aztec.js'; -import { FunctionArtifact, encodeArguments } from '@aztec/foundation/abi'; -import { 
CompleteAddress, PXE } from '@aztec/types'; +import { + AccountWallet, + CompleteAddress, + Fr, + FunctionArtifact, + PXE, + encodeArguments, + getSandboxAccountsWallets, +} from '@aztec/aztec.js'; function convertBasicArg(paramType: string, value: any) { switch (paramType) { diff --git a/yarn-project/boxes/token/src/scripts/view_contract_function.ts b/yarn-project/boxes/token/src/scripts/view_contract_function.ts index 8b3bfd8e901..beff0032c77 100644 --- a/yarn-project/boxes/token/src/scripts/view_contract_function.ts +++ b/yarn-project/boxes/token/src/scripts/view_contract_function.ts @@ -1,6 +1,5 @@ import { getWallet } from './util.js'; -import { AztecAddress, PXE, CompleteAddress, Contract } from '@aztec/aztec.js'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { AztecAddress, CompleteAddress, Contract, ContractArtifact, PXE } from '@aztec/aztec.js'; export async function viewContractFunction( address: AztecAddress, diff --git a/yarn-project/boxes/token/src/tests/token.contract.test.ts b/yarn-project/boxes/token/src/tests/token.contract.test.ts index 0ec4dee07cc..2ff90186f74 100644 --- a/yarn-project/boxes/token/src/tests/token.contract.test.ts +++ b/yarn-project/boxes/token/src/tests/token.contract.test.ts @@ -2,6 +2,9 @@ import { TokenContract } from '../artifacts/Token.js'; import { TokenSimulator } from './token_simulator.js'; import { AccountWallet, + CompleteAddress, + DebugLogger, + ExtendedNote, Fr, Note, PXE, @@ -9,13 +12,11 @@ import { TxStatus, computeAuthWitMessageHash, computeMessageSecretHash, + createDebugLogger, createPXEClient, getSandboxAccountsWallets, waitForSandbox, } from '@aztec/aztec.js'; -import { CompleteAddress } from '@aztec/circuits.js'; -import { DebugLogger, createDebugLogger } from '@aztec/foundation/log'; -import { ExtendedNote } from '@aztec/types'; import { afterEach, beforeAll, expect, jest } from '@jest/globals'; // assumes sandbox is running locally, which this script does not trigger diff --git a/yarn-project/boxes/token/tsconfig.dest.json b/yarn-project/boxes/token/tsconfig.dest.json index 1b9c3a4a72e..97c9e8ef0da 100644 --- a/yarn-project/boxes/token/tsconfig.dest.json +++ b/yarn-project/boxes/token/tsconfig.dest.json @@ -1,5 +1,5 @@ { "extends": ".", - "references": [{ "path": "../../aztec.js" }, { "path": "../../foundation" }, { "path": "../../types" }], + "references": [{ "path": "../../aztec.js" }], "exclude": ["src/**/*.test.ts"] } diff --git a/yarn-project/boxes/token/tsconfig.json b/yarn-project/boxes/token/tsconfig.json index 755ee612d17..831cd23a900 100644 --- a/yarn-project/boxes/token/tsconfig.json +++ b/yarn-project/boxes/token/tsconfig.json @@ -26,14 +26,5 @@ { "path": "../../aztec.js" }, - { - "path": "../../circuits.js" - }, - { - "path": "../../foundation" - }, - { - "path": "../../types" - } ] } diff --git a/yarn-project/canary/Dockerfile b/yarn-project/canary/Dockerfile index 64629daf71a..3856f455c9f 100644 --- a/yarn-project/canary/Dockerfile +++ b/yarn-project/canary/Dockerfile @@ -1,4 +1,4 @@ -FROM node:18-alpine AS builder +FROM node:18.19.0-alpine AS builder RUN apk update && apk add --no-cache bash jq curl @@ -20,7 +20,7 @@ WORKDIR /usr/src/canary RUN ./scripts/update_packages.sh canary ../end-to-end/ RUN yarn && yarn build -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add --no-cache udev ttf-freefont chromium bash ENV CHROME_BIN="/usr/bin/chromium-browser" PUPPETEER_SKIP_CHROMIUM_DOWNLOAD="true" diff --git a/yarn-project/canary/Dockerfile.build 
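The `boxes/token` and `canary` hunks above are mechanical consequences of two changes: `@aztec/aztec.js` now re-exports names that previously had to be imported from `@aztec/foundation`, `@aztec/circuits.js`, and `@aztec/types` (which is also why the tsconfig project references to those packages can be dropped), and the canary Dockerfiles pin `node:18-alpine` to the exact `node:18.19.0-alpine` tag, presumably for reproducible builds. A sketch of the re-export ("barrel") pattern; the module paths are illustrative, not the repo's exact file layout:

```typescript
// aztec.js entry point (illustrative): re-export internals so downstream code
// depends on a single public package instead of several internal ones.
export { ContractArtifact, FunctionArtifact, encodeArguments } from '@aztec/foundation/abi';
export { DebugLogger, createDebugLogger } from '@aztec/foundation/log';
export { CompleteAddress } from '@aztec/circuits.js';
export { ExtendedNote, PXE } from '@aztec/types';

// Downstream (e.g. boxes/token) then needs one import source:
// import { CompleteAddress, ContractArtifact, PXE } from '@aztec/aztec.js';
```

Narrowing consumers to one entry point also frees the internal packages to reorganize without breaking user code.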
b/yarn-project/canary/Dockerfile.build index 3173c2100a4..fadd8daaa8b 100644 --- a/yarn-project/canary/Dockerfile.build +++ b/yarn-project/canary/Dockerfile.build @@ -3,7 +3,7 @@ WORKDIR /usr/src/yarn-project/canary # Productionify. See comment in yarn-project-base/Dockerfile. RUN yarn cache clean && yarn workspaces focus --production -FROM node:18-alpine +FROM node:18.19.0-alpine COPY --from=builder /usr/src /usr/src WORKDIR /usr/src/yarn-project/canary ENTRYPOINT ["yarn", "test"] \ No newline at end of file diff --git a/yarn-project/circuits.js/src/abis/abis.ts b/yarn-project/circuits.js/src/abis/abis.ts index ecee67a7528..6d268d65518 100644 --- a/yarn-project/circuits.js/src/abis/abis.ts +++ b/yarn-project/circuits.js/src/abis/abis.ts @@ -116,10 +116,18 @@ export function computeFunctionLeaf(fnLeaf: FunctionLeafPreimage): Fr { ); } -// The "zero leaf" of the function tree is the hash of 5 zero fields. -// TODO: Why can we not just use a zero field as the zero leaf? Complicates things perhaps unnecessarily? -const functionTreeZeroLeaf = pedersenHash(new Array(5).fill(Buffer.alloc(32))); -const functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf); +let functionTreeRootCalculator: MerkleTreeCalculator | undefined; +/** + * The "zero leaf" of the function tree is the hash of 5 zero fields. + * TODO: Why can we not just use a zero field as the zero leaf? Complicates things perhaps unnecessarily? + */ +function getFunctionTreeRootCalculator() { + if (!functionTreeRootCalculator) { + const functionTreeZeroLeaf = pedersenHash(new Array(5).fill(Buffer.alloc(32))); + functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT, functionTreeZeroLeaf); + } + return functionTreeRootCalculator; +} /** * Computes a function tree from function leaves. 
@@ -128,7 +136,9 @@ const functionTreeRootCalculator = new MerkleTreeCalculator(FUNCTION_TREE_HEIGHT */ export function computeFunctionTree(fnLeaves: Fr[]) { const leaves = fnLeaves.map(fr => fr.toBuffer()); - return functionTreeRootCalculator.computeTree(leaves).map(b => Fr.fromBuffer(b)); + return getFunctionTreeRootCalculator() + .computeTree(leaves) + .map(b => Fr.fromBuffer(b)); } /** @@ -138,7 +148,7 @@ export function computeFunctionTree(fnLeaves: Fr[]) { */ export function computeFunctionTreeRoot(fnLeaves: Fr[]) { const leaves = fnLeaves.map(fr => fr.toBuffer()); - return Fr.fromBuffer(functionTreeRootCalculator.computeTreeRoot(leaves)); + return Fr.fromBuffer(getFunctionTreeRootCalculator().computeTreeRoot(leaves)); } /** @@ -535,7 +545,7 @@ function computePrivateInputsHash(input: PrivateCircuitPublicInputs) { input.blockHeader.nullifierTreeRoot.toBuffer(), input.blockHeader.contractTreeRoot.toBuffer(), input.blockHeader.l1ToL2MessagesTreeRoot.toBuffer(), - input.blockHeader.blocksTreeRoot.toBuffer(), + input.blockHeader.archiveRoot.toBuffer(), input.blockHeader.publicDataTreeRoot.toBuffer(), input.blockHeader.globalVariablesHash.toBuffer(), computeContractDeploymentDataHash(input.contractDeploymentData).toBuffer(), @@ -603,7 +613,7 @@ function computePublicInputsHash(input: PublicCircuitPublicInputs) { input.blockHeader.nullifierTreeRoot.toBuffer(), input.blockHeader.contractTreeRoot.toBuffer(), input.blockHeader.l1ToL2MessagesTreeRoot.toBuffer(), - input.blockHeader.blocksTreeRoot.toBuffer(), + input.blockHeader.archiveRoot.toBuffer(), input.blockHeader.publicDataTreeRoot.toBuffer(), input.blockHeader.globalVariablesHash.toBuffer(), input.proverAddress.toBuffer(), diff --git a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts b/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts deleted file mode 100644 index 58966728949..00000000000 --- a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.test.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Fr } from '@aztec/foundation/fields'; - -import { MerkleTreeRootCalculator } from './merkle_tree_root_calculator.js'; - -describe('merkle tree root calculator', () => { - it('should correctly handle no leaves', () => { - // Height of 3 is 8 leaves. 
- const calculator = new MerkleTreeRootCalculator(4); - const expected = calculator.computeTreeRoot(new Array(8).fill(new Fr(0)).map(fr => fr.toBuffer())); - expect(calculator.computeTreeRoot()).toEqual(expected); - }); - - it('should correctly leverage zero hashes', () => { - const calculator = new MerkleTreeRootCalculator(4); - const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const padded = [...leaves, ...new Array(3).fill(Buffer.alloc(32))]; - const expected = calculator.computeTreeRoot(padded); - const result = calculator.computeTreeRoot(leaves); - expect(result).not.toBeUndefined(); - expect(result).toEqual(expected); - }); - - it('should correctly handle non default zero leaf', () => { - const zeroLeaf = new Fr(666).toBuffer(); - const calculator = new MerkleTreeRootCalculator(4, zeroLeaf); - const leaves = Array.from({ length: 5 }).map((_, i) => new Fr(i).toBuffer()); - const padded = [...leaves, ...new Array(3).fill(zeroLeaf)]; - const expected = calculator.computeTreeRoot(padded); - expect(calculator.computeTreeRoot(leaves)).toEqual(expected); - }); -}); diff --git a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts b/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts deleted file mode 100644 index 904eec35776..00000000000 --- a/yarn-project/circuits.js/src/abis/merkle_tree_root_calculator.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { pedersenHash } from '@aztec/foundation/crypto'; - -/** - * Calculates the root of a merkle tree. - */ -export class MerkleTreeRootCalculator { - private zeroHashes: Buffer[]; - - constructor(private height: number, zeroLeaf = Buffer.alloc(32)) { - this.zeroHashes = Array.from({ length: height }).reduce( - (acc: Buffer[], _, i) => [...acc, pedersenHash([acc[i], acc[i]])], - [zeroLeaf], - ); - } - - computeTreeRoot(leaves: Buffer[] = []) { - if (leaves.length === 0) { - return this.zeroHashes[this.zeroHashes.length - 1]; - } - - for (let i = 0; i < this.height; ++i) { - let j = 0; - for (; j < leaves.length / 2; ++j) { - const l = leaves[j * 2]; - const r = leaves[j * 2 + 1] || this.zeroHashes[i]; - leaves[j] = pedersenHash([l, r]); - } - leaves = leaves.slice(0, j); - } - - return leaves[0]; - } -} diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts index 9b8afc328e2..df34d06d305 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.test.ts @@ -1,3 +1,5 @@ +import { init } from '@aztec/foundation/crypto'; + import { createCipheriv, createDecipheriv, randomBytes } from 'crypto'; import { Aes128 } from './index.js'; @@ -5,7 +7,8 @@ import { Aes128 } from './index.js'; describe('aes128', () => { let aes128!: Aes128; - beforeAll(() => { + beforeAll(async () => { + await init(); aes128 = new Aes128(); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts index 20e0e133b9c..cf3a8a5ddec 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/aes128/index.ts @@ -2,10 +2,6 @@ import { BarretenbergSync, RawBuffer } from '@aztec/bb.js'; import { Buffer } from 'buffer'; -// Get the singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. 
-// This can be called from multiple other modules as needed, and it ensures it's only constructed once. -const api = await BarretenbergSync.getSingleton(); - /** * AES-128-CBC encryption/decryption. */ @@ -28,6 +24,7 @@ export class Aes128 { } const input = Buffer.concat([data, paddingBuffer]); + const api = BarretenbergSync.getSingleton(); return Buffer.from( api.aesEncryptBufferCbc(new RawBuffer(input), new RawBuffer(iv), new RawBuffer(key), input.length), ); @@ -41,6 +38,7 @@ export class Aes128 { * @returns Decrypted data. */ public decryptBufferCBC(data: Uint8Array, iv: Uint8Array, key: Uint8Array) { + const api = BarretenbergSync.getSingleton(); return Buffer.from( api.aesDecryptBufferCbc(new RawBuffer(data), new RawBuffer(iv), new RawBuffer(key), data.length), ); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts index e52933eccf4..3b7dd7d3d73 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/index.ts @@ -4,22 +4,22 @@ import { EcdsaSignature } from './signature.js'; export * from './signature.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * ECDSA signature construction and helper operations. + * TODO: Replace with codegen api on bb.js. */ export class Ecdsa { + private wasm = BarretenbergSync.getSingleton().getWasm(); + /** * Computes a secp256k1 public key from a private key. * @param privateKey - Secp256k1 private key. * @returns A secp256k1 public key. */ public computePublicKey(privateKey: Buffer): Buffer { - wasm.writeMemory(0, privateKey); - wasm.call('ecdsa__compute_public_key', 0, 32); - return Buffer.from(wasm.getMemorySlice(32, 96)); + this.wasm.writeMemory(0, privateKey); + this.wasm.call('ecdsa__compute_public_key', 0, 32); + return Buffer.from(this.wasm.getMemorySlice(32, 96)); } /** @@ -29,15 +29,15 @@ export class Ecdsa { * @returns An ECDSA signature of the form (r, s, v). */ public constructSignature(msg: Uint8Array, privateKey: Buffer) { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, privateKey); - wasm.writeMemory(mem, msg); - wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, privateKey); + this.wasm.writeMemory(mem, msg); + this.wasm.call('ecdsa__construct_signature', mem, msg.length, 0, 32, 64, 96); return new EcdsaSignature( - Buffer.from(wasm.getMemorySlice(32, 64)), - Buffer.from(wasm.getMemorySlice(64, 96)), - Buffer.from(wasm.getMemorySlice(96, 97)), + Buffer.from(this.wasm.getMemorySlice(32, 64)), + Buffer.from(this.wasm.getMemorySlice(64, 96)), + Buffer.from(this.wasm.getMemorySlice(96, 97)), ); } @@ -48,14 +48,14 @@ export class Ecdsa { * @returns The secp256k1 public key of the signer. 
*/ public recoverPublicKey(msg: Uint8Array, sig: EcdsaSignature): Buffer { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, sig.r); - wasm.writeMemory(32, sig.s); - wasm.writeMemory(64, sig.v); - wasm.writeMemory(mem, msg); - wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, sig.r); + this.wasm.writeMemory(32, sig.s); + this.wasm.writeMemory(64, sig.v); + this.wasm.writeMemory(mem, msg); + this.wasm.call('ecdsa__recover_public_key_from_signature', mem, msg.length, 0, 32, 64, 65); - return Buffer.from(wasm.getMemorySlice(65, 129)); + return Buffer.from(this.wasm.getMemorySlice(65, 129)); } /** @@ -66,12 +66,12 @@ export class Ecdsa { * @returns True or false. */ public verifySignature(msg: Uint8Array, pubKey: Buffer, sig: EcdsaSignature) { - const mem = wasm.call('bbmalloc', msg.length); - wasm.writeMemory(0, pubKey); - wasm.writeMemory(64, sig.r); - wasm.writeMemory(96, sig.s); - wasm.writeMemory(128, sig.v); - wasm.writeMemory(mem, msg); - return wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; + const mem = this.wasm.call('bbmalloc', msg.length); + this.wasm.writeMemory(0, pubKey); + this.wasm.writeMemory(64, sig.r); + this.wasm.writeMemory(96, sig.s); + this.wasm.writeMemory(128, sig.v); + this.wasm.writeMemory(mem, msg); + return this.wasm.call('ecdsa__verify_signature', mem, msg.length, 0, 64, 96, 128) ? true : false; } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts index 5a18f988c40..cea06e33b1e 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/ecdsa/signature.ts @@ -4,7 +4,7 @@ import { mapTuple } from '@aztec/foundation/serialize'; import { randomBytes } from 'crypto'; -import { Signature } from '../index.js'; +import { Signature } from '../signature/index.js'; /** * ECDSA signature used for transactions. 
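Aside from the mechanical renames, the crypto hunks above and below all share one refactor: the top-level `await BarretenbergSync.getSingleton()` calls are removed, and each wrapper class now reads the singleton lazily from an instance field. The wrappers therefore assume the WASM module is already initialized, which is why the updated tests await `init()` from '@aztec/foundation/crypto' in `beforeAll`. A minimal usage sketch under that assumption (the relative import path and the `demo` runner are illustrative, not part of the diff):

    import { init } from '@aztec/foundation/crypto';

    import { Ecdsa } from './index.js';

    async function demo() {
      // init() must resolve before any wrapper is constructed, since their `wasm`
      // fields call BarretenbergSync.getSingleton() synchronously.
      await init();
      const ecdsa = new Ecdsa();
      const privateKey = Buffer.alloc(32, 1); // illustrative fixed key
      const msg = new TextEncoder().encode('hello');
      const publicKey = ecdsa.computePublicKey(privateKey);
      const sig = ecdsa.constructSignature(msg, privateKey);
      console.log(ecdsa.verifySignature(msg, publicKey, sig)); // expect true
    }

    void demo();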
diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts index 154ab39075f..671c019291b 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.test.ts @@ -1,3 +1,4 @@ +import { init } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { GrumpkinScalar, Point } from '../../../index.js'; @@ -8,7 +9,8 @@ const debug = createDebugLogger('bb:grumpkin_test'); describe('grumpkin', () => { let grumpkin!: Grumpkin; - beforeAll(() => { + beforeAll(async () => { + await init(); grumpkin = new Grumpkin(); }); diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts index a41c0af1fa0..3abf74fd20a 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/grumpkin/index.ts @@ -1,15 +1,12 @@ import { BarretenbergSync } from '@aztec/bb.js'; -import { Fr, Point } from '@aztec/foundation/fields'; - -import { GrumpkinScalar } from '../../../index.js'; - -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); +import { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; /** * Grumpkin elliptic curve operations. */ export class Grumpkin { + private wasm = BarretenbergSync.getSingleton().getWasm(); + // prettier-ignore static generator = Point.fromBuffer(Buffer.from([ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, @@ -33,10 +30,10 @@ export class Grumpkin { * @returns Result of the multiplication. */ public mul(point: Point, scalar: GrumpkinScalar): Point { - wasm.writeMemory(0, point.toBuffer()); - wasm.writeMemory(64, scalar.toBuffer()); - wasm.call('ecc_grumpkin__mul', 0, 64, 96); - return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(96, 160))); + this.wasm.writeMemory(0, point.toBuffer()); + this.wasm.writeMemory(64, scalar.toBuffer()); + this.wasm.call('ecc_grumpkin__mul', 0, 64, 96); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(96, 160))); } /** @@ -49,16 +46,16 @@ export class Grumpkin { const concatenatedPoints: Buffer = Buffer.concat(points.map(point => point.toBuffer())); const pointsByteLength = points.length * Point.SIZE_IN_BYTES; - const mem = wasm.call('bbmalloc', pointsByteLength * 2); + const mem = this.wasm.call('bbmalloc', pointsByteLength * 2); - wasm.writeMemory(mem, concatenatedPoints); - wasm.writeMemory(0, scalar.toBuffer()); - wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); + this.wasm.writeMemory(mem, concatenatedPoints); + this.wasm.writeMemory(0, scalar.toBuffer()); + this.wasm.call('ecc_grumpkin__batch_mul', mem, 0, points.length, mem + pointsByteLength); const result: Buffer = Buffer.from( - wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), + this.wasm.getMemorySlice(mem + pointsByteLength, mem + pointsByteLength + pointsByteLength), ); - wasm.call('bbfree', mem); + this.wasm.call('bbfree', mem); const parsedResult: Point[] = []; for (let i = 0; i < pointsByteLength; i += 64) { @@ -72,8 +69,8 @@ export class Grumpkin { * @returns Random field element. 
*/ public getRandomFr(): Fr { - wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); - return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(0, 32))); + this.wasm.call('ecc_grumpkin__get_random_scalar_mod_circuit_modulus', 0); + return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(0, 32))); } /** @@ -82,8 +79,8 @@ export class Grumpkin { * @returns Buffer representation of the field element. */ public reduce512BufferToFr(uint512Buf: Buffer): Fr { - wasm.writeMemory(0, uint512Buf); - wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Fr.fromBuffer(Buffer.from(wasm.getMemorySlice(64, 96))); + this.wasm.writeMemory(0, uint512Buf); + this.wasm.call('ecc_grumpkin__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Fr.fromBuffer(Buffer.from(this.wasm.getMemorySlice(64, 96))); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts index 4ab41700e7d..662d561f3f2 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/schnorr/index.ts @@ -1,27 +1,27 @@ import { BarretenbergSync } from '@aztec/bb.js'; +import { Point } from '@aztec/foundation/fields'; import { numToUInt32BE } from '@aztec/foundation/serialize'; -import { GrumpkinPrivateKey, Point, PublicKey } from '../../../index.js'; +import { GrumpkinPrivateKey, PublicKey } from '../../../types/index.js'; import { SchnorrSignature } from './signature.js'; export * from './signature.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * Schnorr signature construction and helper operations. */ export class Schnorr { + private wasm = BarretenbergSync.getSingleton().getWasm(); + /** * Computes a grumpkin public key from a private key. * @param privateKey - The private key. * @returns A grumpkin public key. */ public computePublicKey(privateKey: GrumpkinPrivateKey): PublicKey { - wasm.writeMemory(0, privateKey.toBuffer()); - wasm.call('schnorr_compute_public_key', 0, 32); - return Point.fromBuffer(Buffer.from(wasm.getMemorySlice(32, 96))); + this.wasm.writeMemory(0, privateKey.toBuffer()); + this.wasm.call('schnorr_compute_public_key', 0, 32); + return Point.fromBuffer(Buffer.from(this.wasm.getMemorySlice(32, 96))); } /** @@ -31,12 +31,12 @@ export class Schnorr { * @returns A Schnorr signature of the form (s, e). */ public constructSignature(msg: Uint8Array, privateKey: GrumpkinPrivateKey) { - const mem = wasm.call('bbmalloc', msg.length + 4); - wasm.writeMemory(0, privateKey.toBuffer()); - wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - wasm.call('schnorr_construct_signature', mem, 0, 32, 64); + const mem = this.wasm.call('bbmalloc', msg.length + 4); + this.wasm.writeMemory(0, privateKey.toBuffer()); + this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + this.wasm.call('schnorr_construct_signature', mem, 0, 32, 64); - return new SchnorrSignature(Buffer.from(wasm.getMemorySlice(32, 96))); + return new SchnorrSignature(Buffer.from(this.wasm.getMemorySlice(32, 96))); } /** @@ -47,13 +47,13 @@ export class Schnorr { * @returns True or false. 
*/ public verifySignature(msg: Uint8Array, pubKey: PublicKey, sig: SchnorrSignature) { - const mem = wasm.call('bbmalloc', msg.length + 4); - wasm.writeMemory(0, pubKey.toBuffer()); - wasm.writeMemory(64, sig.s); - wasm.writeMemory(96, sig.e); - wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); - wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); - const result = wasm.getMemorySlice(128, 129); + const mem = this.wasm.call('bbmalloc', msg.length + 4); + this.wasm.writeMemory(0, pubKey.toBuffer()); + this.wasm.writeMemory(64, sig.s); + this.wasm.writeMemory(96, sig.e); + this.wasm.writeMemory(mem, Buffer.concat([numToUInt32BE(msg.length), msg])); + this.wasm.call('schnorr_verify_signature', mem, 0, 64, 96, 128); + const result = this.wasm.getMemorySlice(128, 129); return !Buffer.alloc(1, 0).equals(result); } } diff --git a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts index a388ce602fb..f4afdd82346 100644 --- a/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts +++ b/yarn-project/circuits.js/src/barretenberg/crypto/secp256k1/index.ts @@ -1,12 +1,11 @@ import { BarretenbergSync } from '@aztec/bb.js'; -const api = await BarretenbergSync.getSingleton(); -const wasm = api.getWasm(); - /** * Secp256k1 elliptic curve operations. */ export class Secp256k1 { + private wasm = BarretenbergSync.getSingleton().getWasm(); + // prettier-ignore static generator = Buffer.from([ 0x79, 0xbe, 0x66, 0x7e, 0xf9, 0xdc, 0xbb, 0xac, 0x55, 0xa0, 0x62, 0x95, 0xce, 0x87, 0x0b, 0x07, @@ -30,10 +29,10 @@ export class Secp256k1 { * @returns Result of the multiplication. */ public mul(point: Uint8Array, scalar: Uint8Array) { - wasm.writeMemory(0, point); - wasm.writeMemory(64, scalar); - wasm.call('ecc_secp256k1__mul', 0, 64, 96); - return Buffer.from(wasm.getMemorySlice(96, 160)); + this.wasm.writeMemory(0, point); + this.wasm.writeMemory(64, scalar); + this.wasm.call('ecc_secp256k1__mul', 0, 64, 96); + return Buffer.from(this.wasm.getMemorySlice(96, 160)); } /** @@ -41,8 +40,8 @@ export class Secp256k1 { * @returns Random field element. */ public getRandomFr() { - wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); - return Buffer.from(wasm.getMemorySlice(0, 32)); + this.wasm.call('ecc_secp256k1__get_random_scalar_mod_circuit_modulus', 0); + return Buffer.from(this.wasm.getMemorySlice(0, 32)); } /** @@ -51,8 +50,8 @@ export class Secp256k1 { * @returns Buffer representation of the field element. 
*/ public reduce512BufferToFr(uint512Buf: Buffer) { - wasm.writeMemory(0, uint512Buf); - wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); - return Buffer.from(wasm.getMemorySlice(64, 96)); + this.wasm.writeMemory(0, uint512Buf); + this.wasm.call('ecc_secp256k1__reduce512_buffer_mod_circuit_modulus', 0, 64); + return Buffer.from(this.wasm.getMemorySlice(64, 96)); } } diff --git a/yarn-project/circuits.js/src/constants.gen.ts b/yarn-project/circuits.js/src/constants.gen.ts index 89b201b5e5c..f0cfac323a9 100644 --- a/yarn-project/circuits.js/src/constants.gen.ts +++ b/yarn-project/circuits.js/src/constants.gen.ts @@ -42,13 +42,15 @@ export const CONTRACT_SUBTREE_SIBLING_PATH_LENGTH = 15; export const NOTE_HASH_SUBTREE_HEIGHT = 7; export const NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH = 25; export const NULLIFIER_SUBTREE_HEIGHT = 7; -export const BLOCKS_TREE_HEIGHT = 16; +export const ARCHIVE_HEIGHT = 16; export const NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH = 13; export const L1_TO_L2_MSG_SUBTREE_HEIGHT = 4; export const L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH = 12; export const FUNCTION_SELECTOR_NUM_BYTES = 4; export const MAPPING_SLOT_PEDERSEN_SEPARATOR = 4; export const NUM_FIELDS_PER_SHA256 = 2; +export const ARGS_HASH_CHUNK_LENGTH = 32; +export const ARGS_HASH_CHUNK_COUNT = 16; export const L1_TO_L2_MESSAGE_LENGTH = 8; export const L1_TO_L2_MESSAGE_ORACLE_CALL_LENGTH = 26; export const MAX_NOTE_FIELDS_LENGTH = 20; diff --git a/yarn-project/circuits.js/src/scripts/constants.in.ts b/yarn-project/circuits.js/src/scripts/constants.in.ts index 123bea6ffce..14892b1e1fe 100644 --- a/yarn-project/circuits.js/src/scripts/constants.in.ts +++ b/yarn-project/circuits.js/src/scripts/constants.in.ts @@ -3,7 +3,7 @@ import { fileURLToPath } from '@aztec/foundation/url'; import * as fs from 'fs'; import { dirname, join } from 'path'; -const NOIR_CONSTANTS_FILE = '../../../aztec-nr/aztec/src/constants_gen.nr'; +const NOIR_CONSTANTS_FILE = '../../../noir-protocol-circuits/src/crates/types/src/constants.nr'; const TS_CONSTANTS_FILE = '../constants.gen.ts'; const SOLIDITY_CONSTANTS_FILE = '../../../../l1-contracts/src/core/libraries/ConstantsGen.sol'; diff --git a/yarn-project/circuits.js/src/structs/kernel/block_header.ts b/yarn-project/circuits.js/src/structs/kernel/block_header.ts index 1c83f50351c..f7059e64b60 100644 --- a/yarn-project/circuits.js/src/structs/kernel/block_header.ts +++ b/yarn-project/circuits.js/src/structs/kernel/block_header.ts @@ -31,9 +31,9 @@ export class BlockHeader { */ public l1ToL2MessagesTreeRoot: Fr, /** - * Root of the blocks tree at the time of when this information was assembled. + * Root of the state roots tree (archive) at the block prior to when this information was assembled. */ - public blocksTreeRoot: Fr, + public archiveRoot: Fr, /** * Root of the private kernel vk tree at the time of when this information was assembled. 
*/ @@ -71,7 +71,7 @@ export class BlockHeader { fields.nullifierTreeRoot, fields.contractTreeRoot, fields.l1ToL2MessagesTreeRoot, - fields.blocksTreeRoot, + fields.archiveRoot, fields.privateKernelVkTreeRoot, fields.publicDataTreeRoot, fields.globalVariablesHash, @@ -97,7 +97,7 @@ export class BlockHeader { this.nullifierTreeRoot, this.contractTreeRoot, this.l1ToL2MessagesTreeRoot, - this.blocksTreeRoot, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as + this.archiveRoot, // TODO(#3441) Note private_kernel_vk_tree_root, is not included yet as // it is not present in noir, this.publicDataTreeRoot, this.globalVariablesHash, @@ -128,7 +128,7 @@ export class BlockHeader { this.nullifierTreeRoot.isZero() && this.contractTreeRoot.isZero() && this.l1ToL2MessagesTreeRoot.isZero() && - this.blocksTreeRoot.isZero() && + this.archiveRoot.isZero() && this.privateKernelVkTreeRoot.isZero() && this.publicDataTreeRoot.isZero() && this.globalVariablesHash.isZero() diff --git a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts index 1d6babb0ebe..c6db387a093 100644 --- a/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/base_rollup.ts @@ -1,8 +1,10 @@ +import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; import { Fr } from '@aztec/foundation/fields'; import { BufferReader, Tuple } from '@aztec/foundation/serialize'; +import { IndexedTreeLeaf, IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, CONTRACT_SUBTREE_SIBLING_PATH_LENGTH, KERNELS_PER_BASE_ROLLUP, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, @@ -25,28 +27,103 @@ import { AppendOnlyTreeSnapshot } from './append_only_tree_snapshot.js'; * Class containing the data of a preimage of a single leaf in the nullifier tree. * Note: It's called preimage because this data gets hashed before being inserted as a node into the `IndexedTree`. */ -export class NullifierLeafPreimage { +export class NullifierLeafPreimage implements IndexedTreeLeafPreimage { constructor( /** * Leaf value inside the indexed tree's linked list. */ - public leafValue: Fr, + public nullifier: Fr, /** * Next value inside the indexed tree's linked list. */ - public nextValue: Fr, + public nextNullifier: Fr, /** * Index of the next leaf in the indexed tree's linked list. 
*/ - public nextIndex: UInt32, + public nextIndex: bigint, ) {} - toBuffer() { - return serializeToBuffer(this.leafValue, this.nextValue, this.nextIndex); + getKey(): bigint { + return this.nullifier.toBigInt(); + } + + getNextKey(): bigint { + return this.nextNullifier.toBigInt(); + } + + getNextIndex(): bigint { + return this.nextIndex; + } + + asLeaf(): NullifierLeaf { + return new NullifierLeaf(this.nullifier); + } + + toBuffer(): Buffer { + return Buffer.concat(this.toHashInputs()); + } + + toHashInputs(): Buffer[] { + return [ + Buffer.from(this.nullifier.toBuffer()), + Buffer.from(toBufferBE(this.nextIndex, 32)), + Buffer.from(this.nextNullifier.toBuffer()), + ]; + } + + clone(): NullifierLeafPreimage { + return new NullifierLeafPreimage(this.nullifier, this.nextNullifier, this.nextIndex); + } + + static empty(): NullifierLeafPreimage { + return new NullifierLeafPreimage(Fr.ZERO, Fr.ZERO, 0n); + } + + static fromBuffer(buf: Buffer): NullifierLeafPreimage { + const nullifier = Fr.fromBuffer(buf.subarray(0, 32)); + const nextIndex = toBigIntBE(buf.subarray(32, 64)); + const nextNullifier = Fr.fromBuffer(buf.subarray(64, 96)); + return new NullifierLeafPreimage(nullifier, nextNullifier, nextIndex); + } + + static fromLeaf(leaf: NullifierLeaf, nextKey: bigint, nextIndex: bigint): NullifierLeafPreimage { + return new NullifierLeafPreimage(leaf.nullifier, new Fr(nextKey), nextIndex); + } + + static clone(preimage: NullifierLeafPreimage): NullifierLeafPreimage { + return new NullifierLeafPreimage(preimage.nullifier, preimage.nextNullifier, preimage.nextIndex); + } +} + +/** + * A nullifier to be inserted in the nullifier tree. + */ +export class NullifierLeaf implements IndexedTreeLeaf { + constructor( + /** + * Nullifier value. + */ + public nullifier: Fr, + ) {} + + getKey(): bigint { + return this.nullifier.toBigInt(); + } + + toBuffer(): Buffer { + return this.nullifier.toBuffer(); + } + + isEmpty(): boolean { + return this.nullifier.isZero(); + } + + static buildDummy(key: bigint): NullifierLeaf { + return new NullifierLeaf(new Fr(key)); } - static empty() { - return new NullifierLeafPreimage(Fr.ZERO, Fr.ZERO, 0); + static fromBuffer(buf: Buffer): NullifierLeaf { + return new NullifierLeaf(Fr.fromBuffer(buf)); } } @@ -58,7 +135,7 @@ export class ConstantRollupData { /** * Snapshot of the blocks tree at the start of the rollup. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public archiveSnapshot: AppendOnlyTreeSnapshot, /** * Root of the private kernel verification key tree. @@ -100,7 +177,7 @@ export class ConstantRollupData { static getFields(fields: FieldsOf) { return [ - fields.startBlocksTreeSnapshot, + fields.archiveSnapshot, fields.privateKernelVkTreeRoot, fields.publicKernelVkTreeRoot, fields.baseRollupVkHash, @@ -142,7 +219,7 @@ export class BaseRollupInputs { /** * Snapshot of the blocks tree at the start of the base rollup circuit. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public archiveSnapshot: AppendOnlyTreeSnapshot, /** * The nullifiers to be inserted in the tree, sorted high to low. @@ -196,8 +273,8 @@ export class BaseRollupInputs { /** * Membership witnesses of blocks referred by each of the 2 kernels. 
*/ - public blocksTreeRootMembershipWitnesses: Tuple< - MembershipWitness, + public archiveRootMembershipWitnesses: Tuple< + MembershipWitness, typeof KERNELS_PER_BASE_ROLLUP >, /** @@ -217,7 +294,7 @@ export class BaseRollupInputs { fields.startNullifierTreeSnapshot, fields.startContractTreeSnapshot, fields.startPublicDataTreeRoot, - fields.startBlocksTreeSnapshot, + fields.archiveSnapshot, fields.sortedNewNullifiers, fields.sortednewNullifiersIndexes, fields.lowNullifierLeafPreimages, @@ -227,7 +304,7 @@ export class BaseRollupInputs { fields.newContractsSubtreeSiblingPath, fields.newPublicDataUpdateRequestsSiblingPaths, fields.newPublicDataReadsSiblingPaths, - fields.blocksTreeRootMembershipWitnesses, + fields.archiveRootMembershipWitnesses, fields.constants, ] as const; } diff --git a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts index 087accf1526..ddcb144be63 100644 --- a/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts +++ b/yarn-project/circuits.js/src/structs/rollup/root_rollup.ts @@ -2,7 +2,7 @@ import { Fr } from '@aztec/foundation/fields'; import { BufferReader, Tuple } from '@aztec/foundation/serialize'; import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, } from '../../constants.gen.js'; @@ -39,11 +39,11 @@ export class RootRollupInputs { /** * Snapshot of the historical block roots tree at the start of the rollup. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public startArchiveSnapshot: AppendOnlyTreeSnapshot, /** * Sibling path of the new block tree root. */ - public newBlocksTreeSiblingPath: Tuple, + public newArchiveSiblingPath: Tuple, ) {} toBuffer() { @@ -60,8 +60,8 @@ export class RootRollupInputs { fields.newL1ToL2Messages, fields.newL1ToL2MessagesTreeRootSiblingPath, fields.startL1ToL2MessagesTreeSnapshot, - fields.startBlocksTreeSnapshot, - fields.newBlocksTreeSiblingPath, + fields.startArchiveSnapshot, + fields.newArchiveSiblingPath, ] as const; } } @@ -119,24 +119,6 @@ export class RootRollupPublicInputs { */ public endPublicDataTreeRoot: Fr, - /** - * Snapshot of the historical note hash tree roots tree at the start of the rollup. - */ - public startTreeOfHistoricalNoteHashTreeRootsSnapshot: AppendOnlyTreeSnapshot, - /** - * Snapshot of the historical note hash tree roots tree at the end of the rollup. - */ - public endTreeOfHistoricalNoteHashTreeRootsSnapshot: AppendOnlyTreeSnapshot, - - /** - * Snapshot of the historical contract tree roots tree at the start of the rollup. - */ - public startTreeOfHistoricalContractTreeRootsSnapshot: AppendOnlyTreeSnapshot, - /** - * Snapshot of the historical contract tree roots tree at the end of the rollup. - */ - public endTreeOfHistoricalContractTreeRootsSnapshot: AppendOnlyTreeSnapshot, - /** * Snapshot of the L1 to L2 message tree at the start of the rollup. */ @@ -146,23 +128,14 @@ export class RootRollupPublicInputs { */ public endL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot, - /** - * Snapshot of the historical L1 to L2 message tree roots tree at the start of the rollup. - */ - public startTreeOfHistoricalL1ToL2MessagesTreeRootsSnapshot: AppendOnlyTreeSnapshot, - /** - * Snapshot of the historical L1 to L2 message tree roots tree at the end of the rollup. - */ - public endTreeOfHistoricalL1ToL2MessagesTreeRootsSnapshot: AppendOnlyTreeSnapshot, - /** * Snapshot of the blocks tree roots tree at the start of the rollup. 
*/ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public startArchiveSnapshot: AppendOnlyTreeSnapshot, /** * Snapshot of the blocks tree roots tree at the end of the rollup. */ - public endBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public endArchiveSnapshot: AppendOnlyTreeSnapshot, /** * Hash of the calldata. @@ -186,16 +159,10 @@ export class RootRollupPublicInputs { fields.endContractTreeSnapshot, fields.startPublicDataTreeRoot, fields.endPublicDataTreeRoot, - fields.startTreeOfHistoricalNoteHashTreeRootsSnapshot, - fields.endTreeOfHistoricalNoteHashTreeRootsSnapshot, - fields.startTreeOfHistoricalContractTreeRootsSnapshot, - fields.endTreeOfHistoricalContractTreeRootsSnapshot, fields.startL1ToL2MessagesTreeSnapshot, fields.endL1ToL2MessagesTreeSnapshot, - fields.startTreeOfHistoricalL1ToL2MessagesTreeRootsSnapshot, - fields.endTreeOfHistoricalL1ToL2MessagesTreeRootsSnapshot, - fields.startBlocksTreeSnapshot, - fields.endBlocksTreeSnapshot, + fields.startArchiveSnapshot, + fields.endArchiveSnapshot, fields.calldataHash, fields.l1ToL2MessagesHash, ] as const; @@ -248,12 +215,6 @@ export class RootRollupPublicInputs { reader.readObject(AppendOnlyTreeSnapshot), reader.readObject(AppendOnlyTreeSnapshot), reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), - reader.readObject(AppendOnlyTreeSnapshot), [Fr.fromBuffer(reader), Fr.fromBuffer(reader)], [Fr.fromBuffer(reader), Fr.fromBuffer(reader)], ); diff --git a/yarn-project/circuits.js/src/tests/factories.ts b/yarn-project/circuits.js/src/tests/factories.ts index 2c08c857243..7c39a943fda 100644 --- a/yarn-project/circuits.js/src/tests/factories.ts +++ b/yarn-project/circuits.js/src/tests/factories.ts @@ -4,10 +4,10 @@ import { numToUInt32BE } from '@aztec/foundation/serialize'; import { SchnorrSignature } from '../barretenberg/index.js'; import { + ARCHIVE_HEIGHT, ARGS_LENGTH, AggregationObject, AppendOnlyTreeSnapshot, - BLOCKS_TREE_HEIGHT, BaseOrMergeRollupPublicInputs, BaseRollupInputs, BlockHeader, @@ -726,7 +726,7 @@ export function makeConstantBaseRollupData( globalVariables: GlobalVariables | undefined = undefined, ): ConstantRollupData { return ConstantRollupData.from({ - startBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(seed + 0x300), + archiveSnapshot: makeAppendOnlyTreeSnapshot(seed + 0x300), privateKernelVkTreeRoot: fr(seed + 0x401), publicKernelVkTreeRoot: fr(seed + 0x402), baseRollupVkHash: fr(seed + 0x403), @@ -840,7 +840,7 @@ export function makeRootRollupInputs(seed = 0, globalVariables?: GlobalVariables makeTuple(L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, fr, 0x2100), makeAppendOnlyTreeSnapshot(seed + 0x2200), makeAppendOnlyTreeSnapshot(seed + 0x2200), - makeTuple(BLOCKS_TREE_HEIGHT, fr, 0x2400), + makeTuple(ARCHIVE_HEIGHT, fr, 0x2400), ); } @@ -866,16 +866,10 @@ export function makeRootRollupPublicInputs( endContractTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), startPublicDataTreeRoot: fr((seed += 0x100)), endPublicDataTreeRoot: fr((seed += 0x100)), - startTreeOfHistoricalNoteHashTreeRootsSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - endTreeOfHistoricalNoteHashTreeRootsSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - startTreeOfHistoricalContractTreeRootsSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - endTreeOfHistoricalContractTreeRootsSnapshot: 
makeAppendOnlyTreeSnapshot((seed += 0x100)), startL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), endL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - startTreeOfHistoricalL1ToL2MessagesTreeRootsSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - endTreeOfHistoricalL1ToL2MessagesTreeRootsSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - startBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), - endBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), + startArchiveSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), + endArchiveSnapshot: makeAppendOnlyTreeSnapshot((seed += 0x100)), calldataHash: [new Fr(1n), new Fr(2n)], l1ToL2MessagesHash: [new Fr(3n), new Fr(4n)], }); @@ -902,11 +896,11 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs { const startNullifierTreeSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x200); const startContractTreeSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x300); const startPublicDataTreeRoot = fr(seed + 0x400); - const startBlocksTreeSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x500); + const startArchiveSnapshot = makeAppendOnlyTreeSnapshot(seed + 0x500); const lowNullifierLeafPreimages = makeTuple( MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, - x => new NullifierLeafPreimage(fr(x), fr(x + 0x100), x + 0x200), + x => new NullifierLeafPreimage(fr(x), fr(x + 0x100), BigInt(x + 0x200)), seed + 0x1000, ); @@ -935,8 +929,8 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs { seed + 0x8000, ); - const blocksTreeRootMembershipWitnesses = makeTuple(KERNELS_PER_BASE_ROLLUP, x => - makeMembershipWitness(BLOCKS_TREE_HEIGHT, seed + x * 0x1000 + 0x9000), + const archiveRootMembershipWitnesses = makeTuple(KERNELS_PER_BASE_ROLLUP, x => + makeMembershipWitness(ARCHIVE_HEIGHT, seed + x * 0x1000 + 0x9000), ); const constants = makeConstantBaseRollupData(0x100); @@ -948,7 +942,7 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs { startNullifierTreeSnapshot, startContractTreeSnapshot, startPublicDataTreeRoot, - startBlocksTreeSnapshot, + archiveSnapshot: startArchiveSnapshot, sortedNewNullifiers, sortednewNullifiersIndexes, lowNullifierLeafPreimages, @@ -957,7 +951,7 @@ export function makeBaseRollupInputs(seed = 0): BaseRollupInputs { newContractsSubtreeSiblingPath, newPublicDataUpdateRequestsSiblingPaths, newPublicDataReadsSiblingPaths, - blocksTreeRootMembershipWitnesses, + archiveRootMembershipWitnesses, constants, }); } diff --git a/yarn-project/cli/Dockerfile b/yarn-project/cli/Dockerfile index 2d8c842c90f..5663b1b079b 100644 --- a/yarn-project/cli/Dockerfile +++ b/yarn-project/cli/Dockerfile @@ -1,33 +1,13 @@ -FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project AS yarn-project +FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/yarn-project-prod AS yarn-project-prod -# Need new arch specific image. -FROM node:18-alpine as builder -COPY --from=yarn-project /usr/src /usr/src -ARG COMMIT_TAG="" - -WORKDIR /usr/src/yarn-project/cli -RUN if [[ -n "${COMMIT_TAG}" ]]; then \ - jq --arg v ${COMMIT_TAG} '.version = $v' package.json > _temp && mv _temp package.json; \ - fi - -# Productionify. See comment in yarn-project-base/Dockerfile. -RUN yarn workspaces focus --production && yarn cache clean && rm -rf ../**/src - -# Create final, arch specific, minimal size image. 
-FROM node:18-alpine
-COPY --from=builder /usr/src/yarn-project /usr/src/yarn-project
-COPY --from=builder /usr/src/barretenberg/ts/package /usr/src/barretenberg/ts/package
-COPY --from=builder /usr/src/noir/packages /usr/src/noir/packages
+ENTRYPOINT ["node", "--no-warnings", "/usr/src/yarn-project/cli/dest/bin/index.js"]
+# Setup cache volume.
 ENV XDG_CACHE_HOME /cache
 RUN mkdir /cache && chmod 777 /cache
 VOLUME [ "/cache" ]
+# Run as non-root user.
 RUN corepack enable
-
-# run as non-root user
 RUN addgroup -S aztec && adduser -S aztec -G aztec
 USER aztec
-
-ENV NODE_OPTIONS="--no-warnings --preserve-symlinks"
-ENTRYPOINT ["node", "/usr/src/yarn-project/cli/dest/bin/index.js"]
\ No newline at end of file
diff --git a/yarn-project/cli/src/bin/index.ts b/yarn-project/cli/src/bin/index.ts
index 014d5e05a24..948d81f2940 100644
--- a/yarn-project/cli/src/bin/index.ts
+++ b/yarn-project/cli/src/bin/index.ts
@@ -1,6 +1,5 @@
 #!/usr/bin/env -S node --no-warnings
-import { createDebugLogger } from '@aztec/aztec.js';
-import { createConsoleLogger } from '@aztec/foundation/log';
+import { createConsoleLogger, createDebugLogger } from '@aztec/foundation/log';
 import { getProgram } from '../index.js';
@@ -9,6 +8,9 @@ const log = createConsoleLogger();
 /** CLI main entrypoint */
 async function main() {
+  process.once('SIGINT', () => process.exit(0));
+  process.once('SIGTERM', () => process.exit(0));
+
   const program = getProgram(log, debugLogger);
   await program.parseAsync(process.argv);
 }
diff --git a/yarn-project/cli/src/cmds/add_contract.ts b/yarn-project/cli/src/cmds/add_contract.ts
new file mode 100644
index 00000000000..6ac361f1fbc
--- /dev/null
+++ b/yarn-project/cli/src/cmds/add_contract.ts
@@ -0,0 +1,27 @@
+import { AztecAddress, CompleteAddress, EthAddress, Fr, Point } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+import { getContractArtifact } from '../utils.js';
+
+/**
+ * Registers a previously deployed contract with the PXE so it can be called through this client.
+ */
+export async function addContract(
+  rpcUrl: string,
+  contractArtifactPath: string,
+  contractAddress: AztecAddress,
+  partialAddress: Fr,
+  publicKey: Point,
+  portalContract: EthAddress | undefined,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const artifact = await getContractArtifact(contractArtifactPath, log);
+  const completeAddress = new CompleteAddress(contractAddress, publicKey ?? Fr.ZERO, partialAddress);
+  const portalContractAddress: EthAddress = portalContract ?? EthAddress.ZERO;
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+
+  await client.addContracts([{ artifact, completeAddress, portalContract: portalContractAddress }]);
+  log(`\nContract added to PXE at ${contractAddress.toString()}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/add_note.ts b/yarn-project/cli/src/cmds/add_note.ts
new file mode 100644
index 00000000000..64340034370
--- /dev/null
+++ b/yarn-project/cli/src/cmds/add_note.ts
@@ -0,0 +1,24 @@
+import { AztecAddress, Fr } from '@aztec/aztec.js';
+import { DebugLogger } from '@aztec/foundation/log';
+import { ExtendedNote, Note, TxHash } from '@aztec/types';
+
+import { createCompatibleClient } from '../client.js';
+import { parseFields } from '../utils.js';
+
+/**
+ * Adds a note to the PXE database for the given owner, contract, storage slot and transaction.
+ */
+export async function addNote(
+  address: AztecAddress,
+  contractAddress: AztecAddress,
+  storageSlot: Fr,
+  txHash: TxHash,
+  noteFields: string[],
+  rpcUrl: string,
+  debugLogger: DebugLogger,
+) {
+  const note = new Note(parseFields(noteFields));
+  const extendedNote = new ExtendedNote(note, address, contractAddress, storageSlot, txHash);
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  await client.addNote(extendedNote);
+}
diff --git a/yarn-project/cli/src/cmds/block_number.ts b/yarn-project/cli/src/cmds/block_number.ts
new file mode 100644
index 00000000000..37795a12966
--- /dev/null
+++ b/yarn-project/cli/src/cmds/block_number.ts
@@ -0,0 +1,12 @@
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Fetches the latest block number from the PXE and prints it.
+ */
+export async function blockNumber(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const num = await client.getBlockNumber();
+  log(`${num}\n`);
+}
diff --git a/yarn-project/cli/src/cmds/call.ts b/yarn-project/cli/src/cmds/call.ts
new file mode 100644
index 00000000000..7e395276177
--- /dev/null
+++ b/yarn-project/cli/src/cmds/call.ts
@@ -0,0 +1,35 @@
+import { AztecAddress } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { format } from 'util';
+
+import { createCompatibleClient } from '../client.js';
+import { getFunctionArtifact, getTxSender, prepTx } from '../utils.js';
+
+/**
+ * Simulates a read-only (view) call to a contract function and prints the result.
+ */
+export async function call(
+  functionName: string,
+  functionArgsIn: any[],
+  contractArtifactPath: string,
+  contractAddress: AztecAddress,
+  fromAddress: string | undefined,
+  rpcUrl: string,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log);
+
+  const fnArtifact = getFunctionArtifact(contractArtifact, functionName);
+  if (fnArtifact.parameters.length !== functionArgs.length) {
+    throw Error(
+      `Invalid number of args passed. Expected ${fnArtifact.parameters.length}; Received: ${functionArgs.length}`,
+    );
+  }
+
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const from = await getTxSender(client, fromAddress);
+  const result = await client.viewTx(functionName, functionArgs, contractAddress, from);
+  log(format('\nView result: ', result, '\n'));
+}
diff --git a/yarn-project/cli/src/cmds/check_deploy.ts b/yarn-project/cli/src/cmds/check_deploy.ts
new file mode 100644
index 00000000000..25641418c71
--- /dev/null
+++ b/yarn-project/cli/src/cmds/check_deploy.ts
@@ -0,0 +1,17 @@
+import { AztecAddress, isContractDeployed } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Checks whether a contract is deployed at the given address and reports the result.
+ */
+export async function checkDeploy(rpcUrl: string, contractAddress: AztecAddress, debugLogger: DebugLogger, log: LogFn) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const isDeployed = await isContractDeployed(client, contractAddress);
+  if (isDeployed) {
+    log(`\nContract found at ${contractAddress.toString()}\n`);
+  } else {
+    log(`\nNo contract found at ${contractAddress.toString()}\n`);
+  }
+}
diff --git a/yarn-project/cli/src/cmds/compute_selector.ts b/yarn-project/cli/src/cmds/compute_selector.ts
new file mode 100644
index 00000000000..d0ef8e14abe
--- /dev/null
+++ b/yarn-project/cli/src/cmds/compute_selector.ts
@@ -0,0 +1,10 @@
+import { FunctionSelector } from '@aztec/foundation/abi';
+import { LogFn } from '@aztec/foundation/log';
+
+/**
+ * Computes the function selector for the given function signature and prints it.
+ */
+export function computeSelector(functionSignature: string, log: LogFn) {
+  const selector = FunctionSelector.fromSignature(functionSignature);
+  log(`${selector}`);
+}
diff --git a/yarn-project/cli/src/cmds/create_account.ts b/yarn-project/cli/src/cmds/create_account.ts
new file mode 100644
index 00000000000..f178409c82d
--- /dev/null
+++ b/yarn-project/cli/src/cmds/create_account.ts
@@ -0,0 +1,39 @@
+import { GrumpkinScalar, getSchnorrAccount } from '@aztec/aztec.js';
+import { Fq, Fr } from '@aztec/foundation/fields';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+
+/**
+ * Creates and deploys a new Schnorr account contract, printing its address and keys.
+ */
+export async function createAccount(
+  rpcUrl: string,
+  privateKey: Fq,
+  wait: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+) {
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const actualPrivateKey = privateKey ?? GrumpkinScalar.random();
+
+  const account = getSchnorrAccount(client, actualPrivateKey, actualPrivateKey, Fr.ZERO);
+  const { address, publicKey, partialAddress } = account.getCompleteAddress();
+  const tx = await account.deploy();
+  const txHash = await tx.getTxHash();
+  debugLogger(`Account contract tx sent with hash ${txHash}`);
+  if (wait) {
+    log(`\nWaiting for account contract deployment...`);
+    await tx.wait();
+  } else {
+    log(`\nAccount deployment transaction hash: ${txHash}\n`);
+  }
+
+  log(`\nNew account:\n`);
+  log(`Address: ${address.toString()}`);
+  log(`Public key: ${publicKey.toString()}`);
+  if (!privateKey) {
+    log(`Private key: ${actualPrivateKey.toString()}`);
+  }
+  log(`Partial address: ${partialAddress.toString()}`);
+}
diff --git a/yarn-project/cli/src/cmds/deploy.ts b/yarn-project/cli/src/cmds/deploy.ts
new file mode 100644
index 00000000000..459f5498c12
--- /dev/null
+++ b/yarn-project/cli/src/cmds/deploy.ts
@@ -0,0 +1,77 @@
+import { ContractDeployer, EthAddress, Fr, Point } from '@aztec/aztec.js';
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { createCompatibleClient } from '../client.js';
+import { encodeArgs } from '../encoding.js';
+import { GITHUB_TAG_PREFIX } from '../github.js';
+import { getContractArtifact, getFunctionArtifact } from '../utils.js';
+
+/**
+ * Deploys a contract from its artifact, printing the resulting address (and optionally waiting for the tx to be mined).
+ */
+export async function deploy(
+  artifactPath: string,
+  json: boolean,
+  rpcUrl: string,
+  publicKey: Point | undefined,
+  rawArgs: any[],
+  portalAddress: EthAddress,
+  salt: Fr,
+  wait: boolean,
+  debugLogger: DebugLogger,
+  log: LogFn,
+  logJson: (output: any) => void,
+) {
+  const contractArtifact = await getContractArtifact(artifactPath, log);
+  const constructorArtifact = contractArtifact.functions.find(({ name }) => name === 'constructor');
+
+  const client = await createCompatibleClient(rpcUrl, debugLogger);
+  const nodeInfo = await client.getNodeInfo();
+  const expectedAztecNrVersion = `${GITHUB_TAG_PREFIX}-v${nodeInfo.sandboxVersion}`;
+  if (contractArtifact.aztecNrVersion && contractArtifact.aztecNrVersion !== expectedAztecNrVersion) {
+    log(
+      `\nWarning: Contract was compiled with a different version of Aztec.nr: ${contractArtifact.aztecNrVersion}. Consider updating Aztec.nr to ${expectedAztecNrVersion}\n`,
+    );
+  }
+
+  const deployer = new ContractDeployer(contractArtifact, client, publicKey);
+
+  const constructor = getFunctionArtifact(contractArtifact, 'constructor');
+  if (!constructor) {
+    throw new Error(`Constructor not found in contract ABI`);
+  }
+
+  debugLogger(`Input arguments: ${rawArgs.map((x: any) => `"${x}"`).join(', ')}`);
+  const args = encodeArgs(rawArgs, constructorArtifact!.parameters);
+  debugLogger(`Encoded arguments: ${args.join(', ')}`);
+
+  const deploy = deployer.deploy(...args);
+
+  await deploy.create({ contractAddressSalt: salt, portalContract: portalAddress });
+  const tx = deploy.send({ contractAddressSalt: salt, portalContract: portalAddress });
+  const txHash = await tx.getTxHash();
+  debugLogger(`Deploy tx sent with hash ${txHash}`);
+  if (wait) {
+    const deployed = await tx.wait();
+    const { address, partialAddress } = deployed.contract.completeAddress;
+    if (json) {
+      logJson({ address: address.toString(), partialAddress: partialAddress.toString() });
+    } else {
+      log(`\nContract deployed at ${address.toString()}\n`);
+      log(`Contract partial address ${partialAddress.toString()}\n`);
+    }
+  } else {
+    const { address, partialAddress } = deploy.completeAddress ?? {};
+    if (json) {
+      logJson({
+        address: address?.toString() ?? 'N/A',
+        partialAddress: partialAddress?.toString() ?? 'N/A',
+        txHash: txHash.toString(),
+      });
+    } else {
+      log(`\nContract Address: ${deploy.completeAddress?.address.toString() ?? 'N/A'}`);
+      log(`Contract Partial Address: ${deploy.completeAddress?.partialAddress.toString() ?? 'N/A'}`);
+      log(`Deployment transaction hash: ${txHash}\n`);
+    }
+  }
+}
diff --git a/yarn-project/cli/src/cmds/deploy_l1_contracts.ts b/yarn-project/cli/src/cmds/deploy_l1_contracts.ts
new file mode 100644
index 00000000000..3b45537d88a
--- /dev/null
+++ b/yarn-project/cli/src/cmds/deploy_l1_contracts.ts
@@ -0,0 +1,25 @@
+import { DebugLogger, LogFn } from '@aztec/foundation/log';
+
+import { deployAztecContracts } from '../utils.js';
+
+/**
+ * Deploys the Aztec L1 contracts to the given Ethereum RPC endpoint and prints their addresses.
+ */
+export async function deployL1Contracts(
+  rpcUrl: string,
+  apiKey: string,
+  privateKey: string,
+  mnemonic: string,
+  log: LogFn,
+  debugLogger: DebugLogger,
+) {
+  const { l1ContractAddresses } = await deployAztecContracts(rpcUrl, apiKey, privateKey, mnemonic, debugLogger);
+
+  log('\n');
+  log(`Rollup Address: ${l1ContractAddresses.rollupAddress.toString()}`);
+  log(`Registry Address: ${l1ContractAddresses.registryAddress.toString()}`);
+  log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`);
+  log(`L2 -> L1 Outbox address: ${l1ContractAddresses.outboxAddress.toString()}`);
+  log(`Contract Deployment Emitter Address: ${l1ContractAddresses.contractDeploymentEmitterAddress.toString()}`);
+  log('\n');
+}
diff --git a/yarn-project/cli/src/cmds/example_contracts.ts b/yarn-project/cli/src/cmds/example_contracts.ts
new file mode 100644
index 00000000000..a5b71e2ec0d
--- /dev/null
+++ b/yarn-project/cli/src/cmds/example_contracts.ts
@@ -0,0 +1,12 @@
+import { LogFn } from '@aztec/foundation/log';
+
+import { getExampleContractArtifacts } from '../utils.js';
+
+/**
+ * Prints the names of the example contract artifacts available to the CLI.
+ */
+export async function exampleContracts(log: LogFn) {
+  const abisList = await getExampleContractArtifacts();
+  const names = Object.keys(abisList);
+  names.forEach(name => log(name));
+}
diff --git a/yarn-project/cli/src/cmds/generate_p2p_private_key.ts b/yarn-project/cli/src/cmds/generate_p2p_private_key.ts
new file mode 100644
index 00000000000..4bf3ad7a5c4
--- /dev/null
+++ b/yarn-project/cli/src/cmds/generate_p2p_private_key.ts
@@ -0,0 +1,13 @@
+import { LogFn } from '@aztec/foundation/log';
+
+import { createSecp256k1PeerId } from '@libp2p/peer-id-factory';
+
+/**
+ * Generates a secp256k1 private key and peer id for the P2P layer and prints them.
+ */
+export async function generateP2PPrivateKey(log: LogFn) {
+  const peerId = await createSecp256k1PeerId();
+  const exportedPeerId = Buffer.from(peerId.privateKey!).toString('hex');
+  log(`Private key: ${exportedPeerId}`);
+  log(`Peer Id: ${peerId}`);
+}
diff --git a/yarn-project/cli/src/cmds/generate_private_key.ts b/yarn-project/cli/src/cmds/generate_private_key.ts
new file mode 100644
index 00000000000..8586f03f37a
--- /dev/null
+++ b/yarn-project/cli/src/cmds/generate_private_key.ts
@@ -0,0 +1,23 @@
+import { GrumpkinScalar, generatePublicKey } from '@aztec/aztec.js';
+import { LogFn } from '@aztec/foundation/log';
+
+import { mnemonicToAccount } from 'viem/accounts';
+
+/**
+ * Generates a Grumpkin key pair, either randomly or derived from a mnemonic, and prints it.
+ */
+export function generatePrivateKey(mnemonic: string | undefined, log: LogFn) {
+  let privKey;
+  let publicKey;
+  if (mnemonic) {
+    const acc = mnemonicToAccount(mnemonic);
+    // TODO(#2052): This reduction is not secure enough. TACKLE THIS ISSUE BEFORE MAINNET.
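+    // Note: reducing the 256-bit HD key modulo the Grumpkin scalar order is not a
+    // uniform mapping (scalars below 2^256 mod r come out over-represented), which
+    // is what the TODO above refers to.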
+ const key = GrumpkinScalar.fromBufferReduce(Buffer.from(acc.getHdKey().privateKey!)); + publicKey = generatePublicKey(key); + } else { + const key = GrumpkinScalar.random(); + privKey = key.toString(); + publicKey = generatePublicKey(key); + } + log(`\nPrivate Key: ${privKey}\nPublic Key: ${publicKey.toString()}\n`); +} diff --git a/yarn-project/cli/src/cmds/get_account.ts b/yarn-project/cli/src/cmds/get_account.ts new file mode 100644 index 00000000000..47b3b1056a7 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_account.ts @@ -0,0 +1,18 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getAccount(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const account = await client.getRegisteredAccount(aztecAddress); + + if (!account) { + log(`Unknown account ${aztecAddress.toString()}`); + } else { + log(account.toReadableString()); + } +} diff --git a/yarn-project/cli/src/cmds/get_accounts.ts b/yarn-project/cli/src/cmds/get_accounts.ts new file mode 100644 index 00000000000..155e92d5a4e --- /dev/null +++ b/yarn-project/cli/src/cmds/get_accounts.ts @@ -0,0 +1,19 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getAccounts(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const accounts = await client.getRegisteredAccounts(); + if (!accounts.length) { + log('No accounts found.'); + } else { + log(`Accounts found: \n`); + for (const account of accounts) { + log(account.toReadableString()); + } + } +} diff --git a/yarn-project/cli/src/cmds/get_contract_data.ts b/yarn-project/cli/src/cmds/get_contract_data.ts new file mode 100644 index 00000000000..16d13047972 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_contract_data.ts @@ -0,0 +1,39 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; +import { ContractData } from '@aztec/types'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getContractData( + rpcUrl: string, + contractAddress: AztecAddress, + includeBytecode: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const contractDataWithOrWithoutBytecode = includeBytecode + ? 
await client.getExtendedContractData(contractAddress) + : await client.getContractData(contractAddress); + + if (!contractDataWithOrWithoutBytecode) { + log(`No contract data found at ${contractAddress}`); + return; + } + let contractData: ContractData; + + if ('contractData' in contractDataWithOrWithoutBytecode) { + contractData = contractDataWithOrWithoutBytecode.contractData; + } else { + contractData = contractDataWithOrWithoutBytecode; + } + log(`\nContract Data: \nAddress: ${contractData.contractAddress.toString()}`); + log(`Portal: ${contractData.portalContractAddress.toString()}`); + if ('bytecode' in contractDataWithOrWithoutBytecode) { + log(`Bytecode: ${contractDataWithOrWithoutBytecode.bytecode}`); + } + log('\n'); +} diff --git a/yarn-project/cli/src/cmds/get_logs.ts b/yarn-project/cli/src/cmds/get_logs.ts new file mode 100644 index 00000000000..73a6501b9cf --- /dev/null +++ b/yarn-project/cli/src/cmds/get_logs.ts @@ -0,0 +1,71 @@ +import { AztecAddress, FunctionSelector, LogFilter, LogId, TxHash } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; +import { sleep } from '@aztec/foundation/sleep'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getLogs( + txHash: TxHash, + fromBlock: number, + toBlock: number, + afterLog: LogId, + contractAddress: AztecAddress, + selector: FunctionSelector, + rpcUrl: string, + follow: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const pxe = await createCompatibleClient(rpcUrl, debugLogger); + + if (follow) { + if (txHash) { + throw Error('Cannot use --follow with --tx-hash'); + } + if (toBlock) { + throw Error('Cannot use --follow with --to-block'); + } + } + + const filter: LogFilter = { txHash, fromBlock, toBlock, afterLog, contractAddress, selector }; + + const fetchLogs = async () => { + const response = await pxe.getUnencryptedLogs(filter); + const logs = response.logs; + + if (!logs.length) { + const filterOptions = Object.entries(filter) + .filter(([, value]) => value !== undefined) + .map(([key, value]) => `${key}: ${value}`) + .join(', '); + if (!follow) { + log(`No logs found for filter: {${filterOptions}}`); + } + } else { + if (!follow && !filter.afterLog) { + log('Logs found: \n'); + } + logs.forEach(unencryptedLog => log(unencryptedLog.toHumanReadable())); + // Set the continuation parameter for the following requests + filter.afterLog = logs[logs.length - 1].id; + } + return response.maxLogsHit; + }; + + if (follow) { + log('Fetching logs...'); + while (true) { + const maxLogsHit = await fetchLogs(); + if (!maxLogsHit) { + await sleep(1000); + } + } + } else { + while (await fetchLogs()) { + // Keep fetching logs until we reach the end. 
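+      // (Intentionally empty: fetchLogs prints each page, advances filter.afterLog,
+      // and returns maxLogsHit, which turns false once the final page is fetched.)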
+ } + } +} diff --git a/yarn-project/cli/src/cmds/get_node_info.ts b/yarn-project/cli/src/cmds/get_node_info.ts new file mode 100644 index 00000000000..bc71a7bc8cc --- /dev/null +++ b/yarn-project/cli/src/cmds/get_node_info.ts @@ -0,0 +1,17 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getNodeInfo(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const info = await client.getNodeInfo(); + log(`\nNode Info:\n`); + log(`Sandbox Version: ${info.sandboxVersion}\n`); + log(`Compatible Nargo Version: ${info.compatibleNargoVersion}\n`); + log(`Chain Id: ${info.chainId}\n`); + log(`Protocol Version: ${info.protocolVersion}\n`); + log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); +} diff --git a/yarn-project/cli/src/cmds/get_recipient.ts b/yarn-project/cli/src/cmds/get_recipient.ts new file mode 100644 index 00000000000..9edf6edecfc --- /dev/null +++ b/yarn-project/cli/src/cmds/get_recipient.ts @@ -0,0 +1,18 @@ +import { AztecAddress } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getRecipient(aztecAddress: AztecAddress, rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const recipient = await client.getRecipient(aztecAddress); + + if (!recipient) { + log(`Unknown recipient ${aztecAddress.toString()}`); + } else { + log(recipient.toReadableString()); + } +} diff --git a/yarn-project/cli/src/cmds/get_recipients.ts b/yarn-project/cli/src/cmds/get_recipients.ts new file mode 100644 index 00000000000..92bc9fad973 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_recipients.ts @@ -0,0 +1,19 @@ +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getRecipients(rpcUrl: string, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const recipients = await client.getRecipients(); + if (!recipients.length) { + log('No recipients found.'); + } else { + log(`Recipients found: \n`); + for (const recipient of recipients) { + log(recipient.toReadableString()); + } + } +} diff --git a/yarn-project/cli/src/cmds/get_tx_receipt.ts b/yarn-project/cli/src/cmds/get_tx_receipt.ts new file mode 100644 index 00000000000..fe133608820 --- /dev/null +++ b/yarn-project/cli/src/cmds/get_tx_receipt.ts @@ -0,0 +1,18 @@ +import { TxHash } from '@aztec/aztec.js'; +import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function getTxReceipt(rpcUrl: string, txHash: TxHash, debugLogger: DebugLogger, log: LogFn) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + const receipt = await client.getTxReceipt(txHash); + if (!receipt) { + log(`No receipt found for transaction hash ${txHash.toString()}`); + } else { + log(`\nTransaction receipt: \n${JsonStringify(receipt, true)}\n`); + } +} diff --git a/yarn-project/cli/src/cmds/inspect_contract.ts b/yarn-project/cli/src/cmds/inspect_contract.ts new file mode 100644 index 00000000000..e55954adc1e --- /dev/null +++ 
b/yarn-project/cli/src/cmds/inspect_contract.ts @@ -0,0 +1,29 @@ +import { + FunctionSelector, + decodeFunctionSignature, + decodeFunctionSignatureWithParameterNames, +} from '@aztec/foundation/abi'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { getContractArtifact } from '../utils.js'; + +/** + * + */ +export async function inspectContract(contractArtifactFile: string, debugLogger: DebugLogger, log: LogFn) { + const contractArtifact = await getContractArtifact(contractArtifactFile, debugLogger); + const contractFns = contractArtifact.functions.filter( + f => !f.isInternal && f.name !== 'compute_note_hash_and_nullifier', + ); + if (contractFns.length === 0) { + log(`No external functions found for contract ${contractArtifact.name}`); + } + for (const fn of contractFns) { + const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters); + const signature = decodeFunctionSignature(fn.name, fn.parameters); + const selector = FunctionSelector.fromSignature(signature); + log( + `${fn.functionType} ${signatureWithParameterNames} \n\tfunction signature: ${signature}\n\tselector: ${selector}`, + ); + } +} diff --git a/yarn-project/cli/src/cmds/parse_parameter_struct.ts b/yarn-project/cli/src/cmds/parse_parameter_struct.ts new file mode 100644 index 00000000000..1ef572fd5ce --- /dev/null +++ b/yarn-project/cli/src/cmds/parse_parameter_struct.ts @@ -0,0 +1,30 @@ +import { StructType } from '@aztec/foundation/abi'; +import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { LogFn } from '@aztec/foundation/log'; + +import { parseStructString } from '../encoding.js'; +import { getContractArtifact } from '../utils.js'; + +/** + * + */ +export async function parseParameterStruct( + encodedString: string, + contractArtifactPath: string, + parameterName: string, + log: LogFn, +) { + const contractArtifact = await getContractArtifact(contractArtifactPath, log); + const parameterAbitype = contractArtifact.functions + .map(({ parameters }) => parameters) + .flat() + .find(({ name, type }) => name === parameterName && type.kind === 'struct'); + + if (!parameterAbitype) { + log(`No struct parameter found with name ${parameterName}`); + return; + } + + const data = parseStructString(encodedString, parameterAbitype.type as StructType); + log(`\nStruct Data: \n${JsonStringify(data, true)}\n`); +} diff --git a/yarn-project/cli/src/cmds/register_account.ts b/yarn-project/cli/src/cmds/register_account.ts new file mode 100644 index 00000000000..fae880f81a1 --- /dev/null +++ b/yarn-project/cli/src/cmds/register_account.ts @@ -0,0 +1,24 @@ +import { Fq, Fr } from '@aztec/foundation/fields'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function registerAccount( + rpcUrl: string, + privateKey: Fq, + partialAddress: Fr, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + + const { address, publicKey } = await client.registerAccount(privateKey, partialAddress); + + log(`\nRegistered account:\n`); + log(`Address: ${address.toString()}`); + log(`Public key: ${publicKey.toString()}`); + log(`Partial address: ${partialAddress.toString()}`); +} diff --git a/yarn-project/cli/src/cmds/register_recipient.ts b/yarn-project/cli/src/cmds/register_recipient.ts new file mode 100644 index 00000000000..e2b3aed2f16 --- /dev/null +++ b/yarn-project/cli/src/cmds/register_recipient.ts @@ -0,0 +1,21 @@ 
+import { AztecAddress, Fr, Point } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; +import { CompleteAddress } from '@aztec/types'; + +import { createCompatibleClient } from '../client.js'; + +/** + * + */ +export async function registerRecipient( + aztecAddress: AztecAddress, + publicKey: Point, + partialAddress: Fr, + rpcUrl: string, + debugLogger: DebugLogger, + log: LogFn, +) { + const client = await createCompatibleClient(rpcUrl, debugLogger); + await client.registerRecipient(CompleteAddress.create(aztecAddress, publicKey, partialAddress)); + log(`\nRegistered details for account with address: ${aztecAddress}\n`); +} diff --git a/yarn-project/cli/src/cmds/send.ts b/yarn-project/cli/src/cmds/send.ts new file mode 100644 index 00000000000..cb8c3bfb413 --- /dev/null +++ b/yarn-project/cli/src/cmds/send.ts @@ -0,0 +1,40 @@ +import { AztecAddress, Contract, Fq, Fr, getSchnorrAccount } from '@aztec/aztec.js'; +import { DebugLogger, LogFn } from '@aztec/foundation/log'; + +import { createCompatibleClient } from '../client.js'; +import { prepTx } from '../utils.js'; + +/** + * + */ +export async function send( + functionName: string, + functionArgsIn: any[], + contractArtifactPath: string, + contractAddress: AztecAddress, + privateKey: Fq, + rpcUrl: string, + wait: boolean, + debugLogger: DebugLogger, + log: LogFn, +) { + const { functionArgs, contractArtifact } = await prepTx(contractArtifactPath, functionName, functionArgsIn, log); + + const client = await createCompatibleClient(rpcUrl, debugLogger); + const wallet = await getSchnorrAccount(client, privateKey, privateKey, Fr.ZERO).getWallet(); + const contract = await Contract.at(contractAddress, contractArtifact, wallet); + const tx = contract.methods[functionName](...functionArgs).send(); + log(`\nTransaction hash: ${(await tx.getTxHash()).toString()}`); + if (wait) { + await tx.wait(); + + log('Transaction has been mined'); + + const receipt = await tx.getReceipt(); + log(`Status: ${receipt.status}\n`); + log(`Block number: ${receipt.blockNumber}`); + log(`Block hash: ${receipt.blockHash?.toString('hex')}`); + } else { + log('Transaction pending. Check status with get-tx-receipt'); + } +} diff --git a/yarn-project/cli/src/cmds/unbox.ts b/yarn-project/cli/src/cmds/unbox.ts new file mode 100644 index 00000000000..b84694e2608 --- /dev/null +++ b/yarn-project/cli/src/cmds/unbox.ts @@ -0,0 +1,11 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { unboxContract } from '../unbox.js'; + +/** + * + */ +export async function unbox(contractName: string, localDirectory: string | undefined, cliVersion: string, log: LogFn) { + const unboxTo: string = localDirectory ? 
localDirectory : contractName; + await unboxContract(contractName, unboxTo, cliVersion, log); +} diff --git a/yarn-project/cli/src/index.ts b/yarn-project/cli/src/index.ts index 629efbe4eaa..6f44a6ca8b3 100644 --- a/yarn-project/cli/src/index.ts +++ b/yarn-project/cli/src/index.ts @@ -1,50 +1,16 @@ -import { - AztecAddress, - Contract, - ContractDeployer, - EthAddress, - Fr, - GrumpkinScalar, - Note, - generatePublicKey, - getSchnorrAccount, - isContractDeployed, -} from '@aztec/aztec.js'; -import { - FunctionSelector, - StructType, - decodeFunctionSignature, - decodeFunctionSignatureWithParameterNames, -} from '@aztec/foundation/abi'; -import { JsonStringify } from '@aztec/foundation/json-rpc'; +import { initAztecJs } from '@aztec/aztec.js/init'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; -import { sleep } from '@aztec/foundation/sleep'; import { fileURLToPath } from '@aztec/foundation/url'; -import { compileNoir, generateNoirInterface, generateTypescriptInterface } from '@aztec/noir-compiler/cli'; -import { CompleteAddress, ContractData, ExtendedNote, LogFilter } from '@aztec/types'; +import { addNoirCompilerCommanderActions } from '@aztec/noir-compiler/cli'; -import { createSecp256k1PeerId } from '@libp2p/peer-id-factory'; import { Command, Option } from 'commander'; import { readFileSync } from 'fs'; import { dirname, resolve } from 'path'; -import { format } from 'util'; -import { mnemonicToAccount } from 'viem/accounts'; - -import { createCompatibleClient } from './client.js'; -import { encodeArgs, parseStructString } from './encoding.js'; -import { GITHUB_TAG_PREFIX } from './github.js'; -import { unboxContract } from './unbox.js'; -import { update } from './update/update.js'; + import { - deployAztecContracts, - getContractArtifact, - getExampleContractArtifacts, - getFunctionArtifact, - getTxSender, parseAztecAddress, parseEthereumAddress, parseField, - parseFields, parseOptionalAztecAddress, parseOptionalInteger, parseOptionalLogId, @@ -55,11 +21,8 @@ import { parsePublicKey, parseSaltFromHexString, parseTxHash, - prepTx, } from './utils.js'; -const accountCreationSalt = Fr.ZERO; - const { ETHEREUM_HOST = 'http://localhost:8545', PRIVATE_KEY, API_KEY } = process.env; /** @@ -88,6 +51,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argParser(parsePrivateKey) .makeOptionMandatory(mandatory); + program.hook('preAction', initAztecJs); + program .command('deploy-l1-contracts') .description('Deploys all necessary Ethereum contracts for Aztec.') @@ -104,20 +69,15 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { 'test test test test test test test test test test test junk', ) .action(async options => { - const { l1ContractAddresses } = await deployAztecContracts( + const { deployL1Contracts } = await import('./cmds/deploy_l1_contracts.js'); + await deployL1Contracts( options.rpcUrl, options.apiKey ?? 
'', options.privateKey, options.mnemonic, + log, debugLogger, ); - log('\n'); - log(`Rollup Address: ${l1ContractAddresses.rollupAddress.toString()}`); - log(`Registry Address: ${l1ContractAddresses.registryAddress.toString()}`); - log(`L1 -> L2 Inbox Address: ${l1ContractAddresses.inboxAddress.toString()}`); - log(`L2 -> L1 Outbox address: ${l1ContractAddresses.outboxAddress.toString()}`); - log(`Contract Deployment Emitter Address: ${l1ContractAddresses.contractDeploymentEmitterAddress.toString()}`); - log('\n'); }); program @@ -130,20 +90,9 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { '-m, --mnemonic', 'An optional mnemonic string used for the private key generation. If not provided, random private key will be generated.', ) - .action(options => { - let privKey; - let publicKey; - if (options.mnemonic) { - const acc = mnemonicToAccount(options.mnemonic); - // TODO(#2052): This reduction is not secure enough. TACKLE THIS ISSUE BEFORE MAINNET. - const key = GrumpkinScalar.fromBufferReduce(Buffer.from(acc.getHdKey().privateKey!)); - publicKey = generatePublicKey(key); - } else { - const key = GrumpkinScalar.random(); - privKey = key.toString(); - publicKey = generatePublicKey(key); - } - log(`\nPrivate Key: ${privKey}\nPublic Key: ${publicKey.toString()}\n`); + .action(async options => { + const { generatePrivateKey } = await import('./cmds/generate_private_key.js'); + generatePrivateKey(options.mnemonic, log); }); program @@ -151,10 +100,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .summary('Generates a LibP2P peer private key.') .description('Generates a private key that can be used for running a node on a LibP2P network.') .action(async () => { - const peerId = await createSecp256k1PeerId(); - const exportedPeerId = Buffer.from(peerId.privateKey!).toString('hex'); - log(`Private key: ${exportedPeerId}`); - log(`Peer Id: ${peerId}`); + const { generateP2PPrivateKey } = await import('./cmds/generate_p2p_private_key.js'); + await generateP2PPrivateKey(log); }); program @@ -171,28 +118,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue .option('--no-wait', 'Skip waiting for the contract to be deployed. Print the hash of deployment transaction') .action(async ({ rpcUrl, privateKey, wait }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - const actualPrivateKey = privateKey ?? 
GrumpkinScalar.random(); - - const account = getSchnorrAccount(client, actualPrivateKey, actualPrivateKey, accountCreationSalt); - const { address, publicKey, partialAddress } = account.getCompleteAddress(); - const tx = await account.deploy(); - const txHash = await tx.getTxHash(); - debugLogger(`Account contract tx sent with hash ${txHash}`); - if (wait) { - log(`\nWaiting for account contract deployment...`); - await tx.wait(); - } else { - log(`\nAccount deployment transaction hash: ${txHash}\n`); - } - - log(`\nNew account:\n`); - log(`Address: ${address.toString()}`); - log(`Public key: ${publicKey.toString()}`); - if (!privateKey) { - log(`Private key: ${actualPrivateKey.toString()}`); - } - log(`Partial address: ${partialAddress.toString()}`); + const { createAccount } = await import('./cmds/create_account.js'); + await createAccount(rpcUrl, privateKey, wait, debugLogger, log); }); program @@ -209,14 +136,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async ({ rpcUrl, privateKey, partialAddress }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - - const { address, publicKey } = await client.registerAccount(privateKey, partialAddress); - - log(`\nRegistered account:\n`); - log(`Address: ${address.toString()}`); - log(`Public key: ${publicKey.toString()}`); - log(`Partial address: ${partialAddress.toString()}`); + const { registerAccount } = await import('./cmds/register_account.js'); + await registerAccount(rpcUrl, privateKey, partialAddress, debugLogger, log); }); program @@ -248,58 +169,20 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { // https://github.com/tj/commander.js#other-option-types-negatable-boolean-and-booleanvalue .option('--no-wait', 'Skip waiting for the contract to be deployed. Print the hash of deployment transaction') .action(async (artifactPath, { json, rpcUrl, publicKey, args: rawArgs, portalAddress, salt, wait }) => { - const contractArtifact = await getContractArtifact(artifactPath, log); - const constructorArtifact = contractArtifact.functions.find(({ name }) => name === 'constructor'); - - const client = await createCompatibleClient(rpcUrl, debugLogger); - const nodeInfo = await client.getNodeInfo(); - const expectedAztecNrVersion = `${GITHUB_TAG_PREFIX}-v${nodeInfo.sandboxVersion}`; - if (contractArtifact.aztecNrVersion && contractArtifact.aztecNrVersion !== expectedAztecNrVersion) { - log( - `\nWarning: Contract was compiled with a different version of Aztec.nr: ${contractArtifact.aztecNrVersion}. 
Consider updating Aztec.nr to ${expectedAztecNrVersion}\n`, - ); - } - - const deployer = new ContractDeployer(contractArtifact, client, publicKey); - - const constructor = getFunctionArtifact(contractArtifact, 'constructor'); - if (!constructor) { - throw new Error(`Constructor not found in contract ABI`); - } - - debugLogger(`Input arguments: ${rawArgs.map((x: any) => `"${x}"`).join(', ')}`); - const args = encodeArgs(rawArgs, constructorArtifact!.parameters); - debugLogger(`Encoded arguments: ${args.join(', ')}`); - - const deploy = deployer.deploy(...args); - - await deploy.create({ contractAddressSalt: salt, portalContract: portalAddress }); - const tx = deploy.send({ contractAddressSalt: salt, portalContract: portalAddress }); - const txHash = await tx.getTxHash(); - debugLogger(`Deploy tx sent with hash ${txHash}`); - if (wait) { - const deployed = await tx.wait(); - const { address, partialAddress } = deployed.contract.completeAddress; - if (json) { - logJson({ address: address.toString(), partialAddress: partialAddress.toString() }); - } else { - log(`\nContract deployed at ${address.toString()}\n`); - log(`Contract partial address ${partialAddress.toString()}\n`); - } - } else { - const { address, partialAddress } = deploy.completeAddress ?? {}; - if (json) { - logJson({ - address: address?.toString() ?? 'N/A', - partialAddress: partialAddress?.toString() ?? 'N/A', - txHash: txHash.toString(), - }); - } else { - log(`\nContract Address: ${deploy.completeAddress?.address.toString() ?? 'N/A'}`); - log(`Contract Partial Address: ${deploy.completeAddress?.partialAddress.toString() ?? 'N/A'}`); - log(`Deployment transaction hash: ${txHash}\n`); - } - } + const { deploy } = await import('./cmds/deploy.js'); + await deploy( + artifactPath, + json, + rpcUrl, + publicKey, + rawArgs, + portalAddress, + salt, + wait, + debugLogger, + log, + logJson, + ); }); program @@ -312,14 +195,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async options => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const address = options.contractAddress; - const isDeployed = await isContractDeployed(client, address); - if (isDeployed) { - log(`\nContract found at ${address.toString()}\n`); - } else { - log(`\nNo contract found at ${address.toString()}\n`); - } + const { checkDeploy } = await import('./cmds/check_deploy.js'); + await checkDeploy(options.rpcUrl, options.contractAddress, debugLogger, log); }); program @@ -337,32 +214,27 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .option('--portal-address
', 'Optional address to a portal contract on L1', parseEthereumAddress) .addOption(pxeOption) .action(async options => { - const artifact = await getContractArtifact(options.contractArtifact, log); - const contractAddress: AztecAddress = options.contractAddress; - const completeAddress = new CompleteAddress( - contractAddress, - options.publicKey ?? Fr.ZERO, + const { addContract } = await import('./cmds/add_contract.js'); + await addContract( + options.rpcUrl, + options.contractArtifact, + options.contractAddress, options.partialAddress, + options.publicKey, + options.portalContract, + debugLogger, + log, ); - const portalContract: EthAddress = options.portalContract ?? EthAddress.ZERO; - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - - await client.addContracts([{ artifact, completeAddress, portalContract }]); - log(`\nContract added to PXE at ${contractAddress.toString()}\n`); }); + program .command('get-tx-receipt') .description('Gets the receipt for the specified transaction hash.') .argument('', 'A transaction hash to get the receipt for.', parseTxHash) .addOption(pxeOption) .action(async (txHash, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const receipt = await client.getTxReceipt(txHash); - if (!receipt) { - log(`No receipt found for transaction hash ${txHash.toString()}`); - } else { - log(`\nTransaction receipt: \n${JsonStringify(receipt, true)}\n`); - } + const { getTxReceipt } = await import('./cmds/get_tx_receipt.js'); + await getTxReceipt(options.rpcUrl, txHash, debugLogger, log); }); program @@ -372,28 +244,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .addOption(pxeOption) .option('-b, --include-bytecode ', "Include the contract's public function bytecode, if any.", false) .action(async (contractAddress, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const contractDataWithOrWithoutBytecode = options.includeBytecode - ? 
await client.getExtendedContractData(contractAddress) - : await client.getContractData(contractAddress); - - if (!contractDataWithOrWithoutBytecode) { - log(`No contract data found at ${contractAddress}`); - return; - } - let contractData: ContractData; - - if ('contractData' in contractDataWithOrWithoutBytecode) { - contractData = contractDataWithOrWithoutBytecode.contractData; - } else { - contractData = contractDataWithOrWithoutBytecode; - } - log(`\nContract Data: \nAddress: ${contractData.contractAddress.toString()}`); - log(`Portal: ${contractData.portalContractAddress.toString()}`); - if ('bytecode' in contractDataWithOrWithoutBytecode) { - log(`Bytecode: ${contractDataWithOrWithoutBytecode.bytecode}`); - } - log('\n'); + const { getContractData } = await import('./cmds/get_contract_data.js'); + await getContractData(options.rpcUrl, contractAddress, options.includeBytecode, debugLogger, log); }); program @@ -412,55 +264,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .addOption(pxeOption) .option('--follow', 'If set, will keep polling for new logs until interrupted.') .action(async ({ txHash, fromBlock, toBlock, afterLog, contractAddress, selector, rpcUrl, follow }) => { - const pxe = await createCompatibleClient(rpcUrl, debugLogger); - - if (follow) { - if (txHash) { - throw Error('Cannot use --follow with --tx-hash'); - } - if (toBlock) { - throw Error('Cannot use --follow with --to-block'); - } - } - - const filter: LogFilter = { txHash, fromBlock, toBlock, afterLog, contractAddress, selector }; - - const fetchLogs = async () => { - const response = await pxe.getUnencryptedLogs(filter); - const logs = response.logs; - - if (!logs.length) { - const filterOptions = Object.entries(filter) - .filter(([, value]) => value !== undefined) - .map(([key, value]) => `${key}: ${value}`) - .join(', '); - if (!follow) { - log(`No logs found for filter: {${filterOptions}}`); - } - } else { - if (!follow && !filter.afterLog) { - log('Logs found: \n'); - } - logs.forEach(unencryptedLog => log(unencryptedLog.toHumanReadable())); - // Set the continuation parameter for the following requests - filter.afterLog = logs[logs.length - 1].id; - } - return response.maxLogsHit; - }; - - if (follow) { - log('Fetching logs...'); - while (true) { - const maxLogsHit = await fetchLogs(); - if (!maxLogsHit) { - await sleep(1000); - } - } - } else { - while (await fetchLogs()) { - // Keep fetching logs until we reach the end. 
- } - } + const { getLogs } = await import('./cmds/get_logs.js'); + await getLogs(txHash, fromBlock, toBlock, afterLog, contractAddress, selector, rpcUrl, follow, debugLogger, log); }); program @@ -475,9 +280,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .addOption(pxeOption) .action(async ({ address, publicKey, partialAddress, rpcUrl }) => { - const client = await createCompatibleClient(rpcUrl, debugLogger); - await client.registerRecipient(CompleteAddress.create(address, publicKey, partialAddress)); - log(`\nRegistered details for account with address: ${address}\n`); + const { registerRecipient } = await import('./cmds/register_recipient.js'); + await registerRecipient(address, publicKey, partialAddress, rpcUrl, debugLogger, log); }); program @@ -485,16 +289,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets all the Aztec accounts stored in the PXE.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const accounts = await client.getRegisteredAccounts(); - if (!accounts.length) { - log('No accounts found.'); - } else { - log(`Accounts found: \n`); - for (const account of accounts) { - log(account.toReadableString()); - } - } + const { getAccounts } = await import('./cmds/get_accounts.js'); + await getAccounts(options.rpcUrl, debugLogger, log); }); program @@ -503,14 +299,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argument('
', 'The Aztec address to get account for', parseAztecAddress) .addOption(pxeOption) .action(async (address, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const account = await client.getRegisteredAccount(address); - - if (!account) { - log(`Unknown account ${address.toString()}`); - } else { - log(account.toReadableString()); - } + const { getAccount } = await import('./cmds/get_account.js'); + await getAccount(address, options.rpcUrl, debugLogger, log); }); program @@ -518,16 +308,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets all the recipients stored in the PXE.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const recipients = await client.getRecipients(); - if (!recipients.length) { - log('No recipients found.'); - } else { - log(`Recipients found: \n`); - for (const recipient of recipients) { - log(recipient.toReadableString()); - } - } + const { getRecipients } = await import('./cmds/get_recipients.js'); + await getRecipients(options.rpcUrl, debugLogger, log); }); program @@ -536,14 +318,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .argument('
', 'The Aztec address to get recipient for', parseAztecAddress) .addOption(pxeOption) .action(async (address, options) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const recipient = await client.getRecipient(address); - - if (!recipient) { - log(`Unknown recipient ${address.toString()}`); - } else { - log(recipient.toReadableString()); - } + const { getRecipient } = await import('./cmds/get_recipient.js'); + await getRecipient(address, options.rpcUrl, debugLogger, log); }); program @@ -560,31 +336,18 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .addOption(pxeOption) .option('--no-wait', 'Print transaction hash without waiting for it to be mined') .action(async (functionName, options) => { - const { functionArgs, contractArtifact } = await prepTx( - options.contractArtifact, + const { send } = await import('./cmds/send.js'); + await send( functionName, options.args, + options.contractArtifact, + options.contractAddress, + options.privateKey, + options.rpcUrl, + !options.noWait, + debugLogger, log, ); - const { contractAddress, privateKey } = options; - - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const wallet = await getSchnorrAccount(client, privateKey, privateKey, accountCreationSalt).getWallet(); - const contract = await Contract.at(contractAddress, contractArtifact, wallet); - const tx = contract.methods[functionName](...functionArgs).send(); - log(`\nTransaction hash: ${(await tx.getTxHash()).toString()}`); - if (options.wait) { - await tx.wait(); - - log('Transaction has been mined'); - - const receipt = await tx.getReceipt(); - log(`Status: ${receipt.status}\n`); - log(`Block number: ${receipt.blockNumber}`); - log(`Block hash: ${receipt.blockHash?.toString('hex')}`); - } else { - log('Transaction pending. Check status with get-tx-receipt'); - } }); program @@ -602,23 +365,17 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .option('-f, --from ', 'Aztec address of the caller. If empty, will use the first account from RPC.') .addOption(pxeOption) .action(async (functionName, options) => { - const { functionArgs, contractArtifact } = await prepTx( - options.contractArtifact, + const { call } = await import('./cmds/call.js'); + await call( functionName, options.args, + options.contractArtifact, + options.contractAddress, + options.from, + options.rpcUrl, + debugLogger, log, ); - - const fnArtifact = getFunctionArtifact(contractArtifact, functionName); - if (fnArtifact.parameters.length !== options.args.length) { - throw Error( - `Invalid number of args passed. 
Expected ${fnArtifact.parameters.length}; Received: ${options.args.length}`, - ); - } - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const from = await getTxSender(client, options.from); - const result = await client.viewTx(functionName, functionArgs, options.contractAddress, from); - log(format('\nView result: ', result, '\n')); }); program @@ -631,10 +388,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .requiredOption('-n, --note [note...]', 'The members of a Note serialized as hex strings.', []) .addOption(pxeOption) .action(async (address, contractAddress, storageSlot, txHash, options) => { - const note = new Note(parseFields(options.note)); - const extendedNote = new ExtendedNote(note, address, contractAddress, storageSlot, txHash); - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - await client.addNote(extendedNote); + const { addNote } = await import('./cmds/add_note.js'); + await addNote(address, contractAddress, storageSlot, txHash, options.note, options.rpcUrl, debugLogger); }); // Helper for users to decode hex strings into structs if needed. @@ -648,17 +403,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { ) .requiredOption('-p, --parameter ', 'The name of the struct parameter to decode into') .action(async (encodedString, options) => { - const contractArtifact = await getContractArtifact(options.contractArtifact, log); - const parameterAbitype = contractArtifact.functions - .map(({ parameters }) => parameters) - .flat() - .find(({ name, type }) => name === options.parameter && type.kind === 'struct'); - if (!parameterAbitype) { - log(`No struct parameter found with name ${options.parameter}`); - return; - } - const data = parseStructString(encodedString, parameterAbitype.type as StructType); - log(`\nStruct Data: \n${JsonStringify(data, true)}\n`); + const { parseParameterStruct } = await import('./cmds/parse_parameter_struct.js'); + await parseParameterStruct(encodedString, options.contractArtifact, options.parameter, log); }); program @@ -666,18 +412,16 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets the current Aztec L2 block number.') .addOption(pxeOption) .action(async (options: any) => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const num = await client.getBlockNumber(); - log(`${num}\n`); + const { blockNumber } = await import('./cmds/block_number.js'); + await blockNumber(options.rpcUrl, debugLogger, log); }); program .command('example-contracts') .description('Lists the example contracts available to deploy from @aztec/noir-contracts') .action(async () => { - const abisList = await getExampleContractArtifacts(); - const names = Object.keys(abisList); - names.forEach(name => log(name)); + const { exampleContracts } = await import('./cmds/example_contracts.js'); + await exampleContracts(log); }); program @@ -691,8 +435,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { 'Local directory to unbox source folder to (relative or absolute), optional - defaults to `/`', ) .action(async (contractName, localDirectory) => { - const unboxTo: string = localDirectory ? 
localDirectory : contractName; - await unboxContract(contractName, unboxTo, cliVersion, log); + const { unbox } = await import('./cmds/unbox.js'); + await unbox(contractName, localDirectory, cliVersion, log); }); program @@ -700,14 +444,8 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .description('Gets the information of an aztec node at a URL.') .addOption(pxeOption) .action(async options => { - const client = await createCompatibleClient(options.rpcUrl, debugLogger); - const info = await client.getNodeInfo(); - log(`\nNode Info:\n`); - log(`Sandbox Version: ${info.sandboxVersion}\n`); - log(`Compatible Nargo Version: ${info.compatibleNargoVersion}\n`); - log(`Chain Id: ${info.chainId}\n`); - log(`Protocol Version: ${info.protocolVersion}\n`); - log(`Rollup Address: ${info.l1ContractAddresses.rollupAddress.toString()}`); + const { getNodeInfo } = await import('./cmds/get_node_info.js'); + await getNodeInfo(options.rpcUrl, debugLogger, log); }); program @@ -718,30 +456,17 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { `A compiled Noir contract's artifact in JSON format or name of a contract artifact exported by @aztec/noir-contracts`, ) .action(async (contractArtifactFile: string) => { - const contractArtifact = await getContractArtifact(contractArtifactFile, debugLogger); - const contractFns = contractArtifact.functions.filter( - f => !f.isInternal && f.name !== 'compute_note_hash_and_nullifier', - ); - if (contractFns.length === 0) { - log(`No external functions found for contract ${contractArtifact.name}`); - } - for (const fn of contractFns) { - const signatureWithParameterNames = decodeFunctionSignatureWithParameterNames(fn.name, fn.parameters); - const signature = decodeFunctionSignature(fn.name, fn.parameters); - const selector = FunctionSelector.fromSignature(signature); - log( - `${fn.functionType} ${signatureWithParameterNames} \n\tfunction signature: ${signature}\n\tselector: ${selector}`, - ); - } + const { inspectContract } = await import('./cmds/inspect_contract.js'); + await inspectContract(contractArtifactFile, debugLogger, log); }); program .command('compute-selector') .description('Given a function signature, it computes a selector') .argument('', 'Function signature to compute selector for e.g. foo(Field)') - .action((functionSignature: string) => { - const selector = FunctionSelector.fromSignature(functionSignature); - log(`${selector}`); + .action(async (functionSignature: string) => { + const { computeSelector } = await import('./cmds/compute_selector.js'); + computeSelector(functionSignature, log); }); program @@ -752,13 +477,11 @@ export function getProgram(log: LogFn, debugLogger: DebugLogger): Command { .option('--sandbox-version ', 'The sandbox version to update to. 
Defaults to latest', 'latest') .addOption(pxeOption) .action(async (projectPath: string, options) => { - const { contract } = options; - await update(projectPath, contract, options.rpcUrl, options.sandboxVersion, log, debugLogger); + const { update } = await import('./update/update.js'); + await update(projectPath, options.contract, options.rpcUrl, options.sandboxVersion, log, debugLogger); }); - compileNoir(program, 'compile', log); - generateTypescriptInterface(program, 'generate-typescript', log); - generateNoirInterface(program, 'generate-noir-interface', log); + addNoirCompilerCommanderActions(program, log); return program; } diff --git a/yarn-project/cli/src/utils.ts b/yarn-project/cli/src/utils.ts index 9973d9fa2fc..ed8dd3cc017 100644 --- a/yarn-project/cli/src/utils.ts +++ b/yarn-project/cli/src/utils.ts @@ -1,24 +1,15 @@ -import { AztecAddress, EthAddress, Fr, FunctionSelector, GrumpkinScalar, PXE, Point, TxHash } from '@aztec/aztec.js'; -import { L1ContractArtifactsForDeployment, createEthereumChain, deployL1Contracts } from '@aztec/ethereum'; -import { ContractArtifact } from '@aztec/foundation/abi'; +import { type ContractArtifact, type FunctionArtifact, FunctionSelector } from '@aztec/aztec.js/abi'; +import { AztecAddress } from '@aztec/aztec.js/aztec_address'; +import { EthAddress } from '@aztec/aztec.js/eth_address'; +import { type L1ContractArtifactsForDeployment } from '@aztec/aztec.js/ethereum'; +import { Fr, GrumpkinScalar, Point } from '@aztec/aztec.js/fields'; +import { type PXE } from '@aztec/aztec.js/interfaces/pxe'; +import { LogId } from '@aztec/aztec.js/log_id'; +import { TxHash } from '@aztec/aztec.js/tx_hash'; import { DebugLogger, LogFn } from '@aztec/foundation/log'; -import { - ContractDeploymentEmitterAbi, - ContractDeploymentEmitterBytecode, - InboxAbi, - InboxBytecode, - OutboxAbi, - OutboxBytecode, - RegistryAbi, - RegistryBytecode, - RollupAbi, - RollupBytecode, -} from '@aztec/l1-artifacts'; -import { LogId } from '@aztec/types'; import { CommanderError, InvalidArgumentError } from 'commander'; -import fs from 'fs'; -import { mnemonicToAccount, privateKeyToAccount } from 'viem/accounts'; +import { readFile, rename, writeFile } from 'fs/promises'; import { encodeArgs } from './encoding.js'; @@ -35,7 +26,7 @@ interface ArtifactsType { * @param fnName - Function name to be found. * @returns The function's ABI. */ -export function getFunctionArtifact(artifact: ContractArtifact, fnName: string) { +export function getFunctionArtifact(artifact: ContractArtifact, fnName: string): FunctionArtifact { const fn = artifact.functions.find(({ name }) => name === fnName); if (!fn) { throw Error(`Function ${fnName} not found in contract ABI.`); @@ -57,6 +48,21 @@ export async function deployAztecContracts( mnemonic: string, debugLogger: DebugLogger, ) { + const { + ContractDeploymentEmitterAbi, + ContractDeploymentEmitterBytecode, + InboxAbi, + InboxBytecode, + OutboxAbi, + OutboxBytecode, + RegistryAbi, + RegistryBytecode, + RollupAbi, + RollupBytecode, + } = await import('@aztec/l1-artifacts'); + const { createEthereumChain, deployL1Contracts } = await import('@aztec/ethereum'); + const { mnemonicToAccount, privateKeyToAccount } = await import('viem/accounts'); + const account = !privateKey ? mnemonicToAccount(mnemonic!) 
: privateKeyToAccount(`0x${privateKey}`); const chain = createEthereumChain(rpcUrl, apiKey); const l1Artifacts: L1ContractArtifactsForDeployment = { @@ -107,7 +113,7 @@ export async function getContractArtifact(fileDir: string, log: LogFn) { } try { - contents = fs.readFileSync(fileDir, 'utf8'); + contents = await readFile(fileDir, 'utf8'); } catch { throw Error(`Contract ${fileDir} not found`); } @@ -412,11 +418,11 @@ export function parseFields(fields: string[]): Fr[] { export async function atomicUpdateFile(filePath: string, contents: string) { const tmpFilepath = filePath + '.tmp'; try { - await fs.promises.writeFile(tmpFilepath, contents, { + await writeFile(tmpFilepath, contents, { // let's crash if the tmp file already exists flag: 'wx', }); - await fs.promises.rename(tmpFilepath, filePath); + await rename(tmpFilepath, filePath); } catch (e) { if (e instanceof Error && 'code' in e && e.code === 'EEXIST') { const commanderError = new CommanderError( diff --git a/yarn-project/deploy_npm.sh b/yarn-project/deploy_npm.sh index 2bd497870f4..04f6ab46841 100755 --- a/yarn-project/deploy_npm.sh +++ b/yarn-project/deploy_npm.sh @@ -2,18 +2,15 @@ [ -n "${BUILD_SYSTEM_DEBUG:-}" ] && set -x # conditionally trace set -eu -# Check we're on a release flow. -if [ -z "$COMMIT_TAG" ] && [ ! "$DRY_DEPLOY" -eq 1 ]; then - echo "Not on a release flow, skipping deploy." +if [ -z "$COMMIT_TAG" ]; then + echo "No commit tag, not deploying to npm." exit 0 fi -extract_repo yarn-project /usr/src project +extract_repo yarn-project-prod /usr/src project cd project/src/yarn-project echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >.npmrc -# also copy npcrc into the l1-contracts directory -cp .npmrc ../l1-contracts # This is to be used with the 'canary' tag for testing, and then 'latest' for making it public DIST_TAG=${1:-"latest"} @@ -71,12 +68,8 @@ function deploy_package() { fi fi - # Back to root - if [ "$REPOSITORY" == "../l1-contracts" ]; then - cd ../yarn-project - else - cd .. - fi + # Return to root + cd .. } # New packages here should be added after the last package that they depend on @@ -100,4 +93,3 @@ deploy_package world-state deploy_package sequencer-client deploy_package aztec-node deploy_package aztec-sandbox -deploy_package ../l1-contracts diff --git a/yarn-project/end-to-end/Dockerfile b/yarn-project/end-to-end/Dockerfile index 915eb134e5e..d65eda2b13a 100644 --- a/yarn-project/end-to-end/Dockerfile +++ b/yarn-project/end-to-end/Dockerfile @@ -6,11 +6,11 @@ RUN yarn build:web WORKDIR /usr/src/yarn-project/end-to-end # Productionify. See comment in yarn-project-base/Dockerfile. -RUN yarn cache clean && yarn workspaces focus --production +RUN yarn workspaces focus --production && yarn cache clean # Create final, minimal size image. # TODO: Not very minimal as chromium adds about 500MB of bloat :/ Separate or install at test runtime? 
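Every command in the rewritten index.ts above follows the same lazy-loading shape, as does deployAztecContracts in utils.ts: heavy modules are imported inside the handler so they are only loaded when actually needed. A minimal sketch of that pattern, with the option parsing from the diff elided:

```ts
import { Command } from 'commander';

const program = new Command();

program.command('example-contracts').action(async () => {
  // The module graph behind the command is loaded only when it runs,
  // keeping `--help` and unrelated commands fast to start.
  const { exampleContracts } = await import('./cmds/example_contracts.js');
  await exampleContracts(console.log);
});

await program.parseAsync(process.argv);
```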
-FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add --no-cache \ jq \ bash \ @@ -30,5 +30,4 @@ COPY --from=builder /usr/src/yarn-project/aztec.js/dest/main.js /usr/src/yarn-pr WORKDIR /usr/src/yarn-project/end-to-end -ENV NODE_OPTIONS=--preserve-symlinks ENTRYPOINT ["yarn", "test"] diff --git a/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts b/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts index d2b5d7aac34..56807979809 100644 --- a/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts +++ b/yarn-project/end-to-end/src/e2e_inclusion_proofs_contract.test.ts @@ -46,7 +46,8 @@ describe('e2e_inclusion_proofs_contract', () => { { // Prove note inclusion in a given block. - // We prove the note existence at current block number because we don't currently have historical data + // TODO: Use here note block number from the creation note tx to test archival node. This is currently not + // possible because of issue #3564 const blockNumber = await pxe.getBlockNumber(); const ignoredCommitment = 0; // Not ignored only when the note doesn't exist await contract.methods.proveNoteInclusion(owner, blockNumber, ignoredCommitment).send().wait(); @@ -54,7 +55,8 @@ describe('e2e_inclusion_proofs_contract', () => { { // Prove that the note has not been nullified - // We prove the note existence at current block number because we don't currently have historical data + // TODO: Use here note block number from the creation note tx to test archival node. This is currently not + // possible because of issue #3564 const blockNumber = await pxe.getBlockNumber(); const ignoredNullifier = 0; // Not ignored only when the note doesn't exist await contract.methods.proveNullifierNonInclusion(owner, blockNumber, ignoredNullifier).send().wait(); diff --git a/yarn-project/end-to-end/src/shared/browser.ts b/yarn-project/end-to-end/src/shared/browser.ts index 72b064bc903..9c7cfdfc154 100644 --- a/yarn-project/end-to-end/src/shared/browser.ts +++ b/yarn-project/end-to-end/src/shared/browser.ts @@ -84,8 +84,9 @@ export const browserTestSuite = (setup: () => Server, pageLogger: AztecJs.DebugL }); it('Loads Aztec.js in the browser', async () => { - const generatePublicKeyExists = await page.evaluate(() => { - const { generatePublicKey } = window.AztecJs; + const generatePublicKeyExists = await page.evaluate(async () => { + const { generatePublicKey, init } = window.AztecJs; + await init(); return typeof generatePublicKey === 'function'; }); expect(generatePublicKeyExists).toBe(true); diff --git a/yarn-project/end-to-end/tsconfig.json b/yarn-project/end-to-end/tsconfig.json index 8792db4c3c5..c44790b98d8 100644 --- a/yarn-project/end-to-end/tsconfig.json +++ b/yarn-project/end-to-end/tsconfig.json @@ -55,7 +55,5 @@ "path": "../world-state" } ], - "include": [ - "src" - ] + "include": ["src"] } diff --git a/yarn-project/ethereum/src/testnet.ts b/yarn-project/ethereum/src/testnet.ts index 4c3694f9361..83deeddc164 100644 --- a/yarn-project/ethereum/src/testnet.ts +++ b/yarn-project/ethereum/src/testnet.ts @@ -2,11 +2,11 @@ import { Chain } from 'viem'; import { EthereumChain } from './ethereum_chain.js'; -const { DEPLOY_TAG = 'aztec-dev' } = process.env; +const { DEPLOY_TAG = 'aztec-dev', CHAIN_ID = 31337 } = process.env; export const createTestnetChain = (apiKey: string) => { const chain: Chain = { - id: 677868, + id: +CHAIN_ID, name: 'testnet', network: 'aztec', nativeCurrency: { diff --git a/yarn-project/foundation/package.json 
b/yarn-project/foundation/package.json index 372c031ef39..e6e8c8e66c3 100644 --- a/yarn-project/foundation/package.json +++ b/yarn-project/foundation/package.json @@ -28,6 +28,7 @@ "./sleep": "./dest/sleep/index.js", "./timer": "./dest/timer/index.js", "./transport": "./dest/transport/index.js", + "./trees": "./dest/trees/index.js", "./wasm": "./dest/wasm/index.js", "./worker": "./dest/worker/index.js", "./bigint-buffer": "./dest/bigint-buffer/index.js", diff --git a/yarn-project/foundation/src/abi/function_selector.ts b/yarn-project/foundation/src/abi/function_selector.ts index b898519d53e..df53ef9a1fb 100644 --- a/yarn-project/foundation/src/abi/function_selector.ts +++ b/yarn-project/foundation/src/abi/function_selector.ts @@ -120,7 +120,7 @@ export class FunctionSelector { static fromString(selector: string) { const buf = Buffer.from(selector.replace(/^0x/i, ''), 'hex'); if (buf.length !== FunctionSelector.SIZE) { - throw new Error(`Invalid length ${buf.length}.`); + throw new Error(`Invalid FunctionSelector length ${buf.length}.`); } return FunctionSelector.fromBuffer(buf); } diff --git a/yarn-project/foundation/src/aztec-address/index.ts b/yarn-project/foundation/src/aztec-address/index.ts index 4c042ad96c6..a03257f36de 100644 --- a/yarn-project/foundation/src/aztec-address/index.ts +++ b/yarn-project/foundation/src/aztec-address/index.ts @@ -10,7 +10,7 @@ import { Fr } from '../fields/index.js'; export class AztecAddress extends Fr { constructor(buffer: Buffer) { if (buffer.length !== 32) { - throw new Error(`Invalid length ${buffer.length}.`); + throw new Error(`Invalid AztecAddress length ${buffer.length}.`); } super(buffer); } diff --git a/yarn-project/foundation/src/crypto/index.ts b/yarn-project/foundation/src/crypto/index.ts index 98abed1dacd..f574fb4d2f5 100644 --- a/yarn-project/foundation/src/crypto/index.ts +++ b/yarn-project/foundation/src/crypto/index.ts @@ -1,4 +1,16 @@ +import { BarretenbergSync } from '@aztec/bb.js'; + export * from './keccak/index.js'; export * from './random/index.js'; export * from './sha256/index.js'; export * from './pedersen/index.js'; + +/** + * Init the bb singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. + * It takes about 100-200ms to initialize. It may not seem like much, but when in conjunction with many other things + * initializing, developers may want to pick precisely when to incur this cost. + * If in a test environment, we'll just do it on module load. 
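+ * Repeated calls are harmless: the underlying singleton is only ever constructed once
+ * (see the comment removed from pedersen.wasm.ts below).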
+ */ +export async function init() { + await BarretenbergSync.initSingleton(); +} diff --git a/yarn-project/foundation/src/crypto/pedersen/index.test.ts b/yarn-project/foundation/src/crypto/pedersen/index.test.ts index 1d152a917d0..412b07ec962 100644 --- a/yarn-project/foundation/src/crypto/pedersen/index.test.ts +++ b/yarn-project/foundation/src/crypto/pedersen/index.test.ts @@ -1,7 +1,13 @@ +import { BarretenbergSync } from '@aztec/bb.js'; + import { toBufferBE } from '../../bigint-buffer/index.js'; import { pedersenCommit, pedersenHash, pedersenHashBuffer } from './index.js'; describe('pedersen', () => { + beforeAll(async () => { + await BarretenbergSync.initSingleton(); + }); + it('pedersen commit', () => { const r = pedersenCommit([toBufferBE(1n, 32), toBufferBE(1n, 32)]); expect(r).toEqual([ diff --git a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts index 2a117ea5519..6793b368c8f 100644 --- a/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts +++ b/yarn-project/foundation/src/crypto/pedersen/pedersen.wasm.ts @@ -1,9 +1,5 @@ import { BarretenbergSync, Fr } from '@aztec/bb.js'; -// Get the singleton. This constructs (if not already) the barretenberg sync api within bb.js itself. -// This can be called from multiple other modules as needed, and it ensures it's only constructed once. -const api = await BarretenbergSync.getSingleton(); - /** * Create a pedersen commitment (point) from an array of input fields. * Left pads any inputs less than 32 bytes. @@ -13,7 +9,7 @@ export function pedersenCommit(input: Buffer[]) { throw new Error('All input buffers must be <= 32 bytes.'); } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); - const point = api.pedersenCommit(input.map(i => new Fr(i))); + const point = BarretenbergSync.getSingleton().pedersenCommit(input.map(i => new Fr(i))); // toBuffer returns Uint8Arrays (browser/worker-boundary friendly). // TODO: rename toTypedArray()? return [Buffer.from(point.x.toBuffer()), Buffer.from(point.y.toBuffer())]; @@ -29,7 +25,7 @@ export function pedersenHash(input: Buffer[], index = 0) { } input = input.map(i => (i.length < 32 ? Buffer.concat([Buffer.alloc(32 - i.length, 0), i]) : i)); return Buffer.from( - api + BarretenbergSync.getSingleton() .pedersenHash( input.map(i => new Fr(i)), index, @@ -42,5 +38,5 @@ export function pedersenHash(input: Buffer[], index = 0) { * Create a pedersen hash from an arbitrary length buffer. 
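With the module-level `await BarretenbergSync.getSingleton()` gone, initialization is now the caller's responsibility, as the `beforeAll` hook in the pedersen test above shows. A minimal usage sketch, assuming crypto/index.js re-exports the pedersen helpers alongside the new `init()`:

```ts
import { init, pedersenHash } from '@aztec/foundation/crypto';

// Pay the ~100-200ms Barretenberg construction cost once, up front...
await init();

// ...after which the synchronous pedersen helpers can be called freely.
const hash = pedersenHash([Buffer.alloc(32, 1), Buffer.alloc(32, 2)]);
console.log(hash.toString('hex'));
```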
*/ export function pedersenHashBuffer(input: Buffer, index = 0) { - return Buffer.from(api.pedersenHashBuffer(input, index).toBuffer()); + return Buffer.from(BarretenbergSync.getSingleton().pedersenHashBuffer(input, index).toBuffer()); } diff --git a/yarn-project/foundation/src/index.ts b/yarn-project/foundation/src/index.ts index c75ef10e563..7e06583f10d 100644 --- a/yarn-project/foundation/src/index.ts +++ b/yarn-project/foundation/src/index.ts @@ -21,6 +21,7 @@ export * as serialize from './serialize/index.js'; export * as sleep from './sleep/index.js'; export * as timer from './timer/index.js'; export * as transport from './transport/index.js'; +export * as trees from './trees/index.js'; export * as types from './types/index.js'; export * as url from './url/index.js'; export * as wasm from './wasm/index.js'; diff --git a/yarn-project/foundation/src/json-rpc/server/index.ts b/yarn-project/foundation/src/json-rpc/server/index.ts index 8495233b59d..9a32d317b08 100644 --- a/yarn-project/foundation/src/json-rpc/server/index.ts +++ b/yarn-project/foundation/src/json-rpc/server/index.ts @@ -1,2 +1,2 @@ -export { JsonRpcServer } from './json_rpc_server.js'; +export { JsonRpcServer, createStatusRouter } from './json_rpc_server.js'; export { JsonProxy } from './json_proxy.js'; diff --git a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts index 722f0c925f8..64651ea30bf 100644 --- a/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts +++ b/yarn-project/foundation/src/json-rpc/server/json_rpc_server.ts @@ -188,3 +188,16 @@ export class JsonRpcServer { httpServer.listen(port); } } + +/** + * Creates a router for handling a plain status request that will return 200 status when running. + * @param apiPrefix - The prefix to use for all api requests + * @returns - The router for handling status requests. + */ +export function createStatusRouter(apiPrefix = '') { + const router = new Router({ prefix: `${apiPrefix}` }); + router.get('/status', (ctx: Koa.Context) => { + ctx.status = 200; + }); + return router; +} diff --git a/yarn-project/foundation/src/trees/index.ts b/yarn-project/foundation/src/trees/index.ts new file mode 100644 index 00000000000..030a59f2570 --- /dev/null +++ b/yarn-project/foundation/src/trees/index.ts @@ -0,0 +1,48 @@ +/** + * A leaf of an indexed merkle tree. + */ +export interface IndexedTreeLeaf { + /** + * Returns key of the leaf. It's used for indexing. + */ + getKey(): bigint; + /** + * Serializes the leaf into a buffer. + */ + toBuffer(): Buffer; + /** + * Returns true if the leaf is empty. + */ + isEmpty(): boolean; +} + +/** + * Preimage of an indexed merkle tree leaf. + */ +export interface IndexedTreeLeafPreimage { + /** + * Returns key of the leaf corresponding to this preimage. + */ + getKey(): bigint; + /** + * Returns the key of the next leaf. + */ + getNextKey(): bigint; + /** + * Returns the index of the next leaf. + */ + getNextIndex(): bigint; + + /** + * Returns the preimage as a leaf. + */ + asLeaf(): IndexedTreeLeaf; + /** + * Serializes the preimage into a buffer. + */ + toBuffer(): Buffer; + /** + * Serializes the preimage to an array of buffers for hashing. 
+ */ + toHashInputs(): Buffer[]; +} diff --git a/yarn-project/merkle-tree/package.json b/yarn-project/merkle-tree/package.json index 6418d219963..4c7b53d8f42 100644 --- a/yarn-project/merkle-tree/package.json +++ b/yarn-project/merkle-tree/package.json @@ -40,6 +40,7 @@ "tslib": "^2.4.0" }, "devDependencies": { + "@aztec/circuits.js": "workspace:^", "@jest/globals": "^29.5.0", "@types/jest": "^29.5.0", "@types/levelup": "^5.1.2", diff --git a/yarn-project/merkle-tree/src/index.ts b/yarn-project/merkle-tree/src/index.ts index 5181cecfc15..68826f44e42 100644 --- a/yarn-project/merkle-tree/src/index.ts +++ b/yarn-project/merkle-tree/src/index.ts @@ -4,7 +4,7 @@ export * from './interfaces/merkle_tree.js'; export * from './interfaces/update_only_tree.js'; export * from './pedersen.js'; export * from './sparse_tree/sparse_tree.js'; -export { LowLeafWitnessData, StandardIndexedTree } from './standard_indexed_tree/standard_indexed_tree.js'; +export { StandardIndexedTree } from './standard_indexed_tree/standard_indexed_tree.js'; export * from './standard_tree/standard_tree.js'; export { INITIAL_LEAF } from './tree_base.js'; export { newTree } from './new_tree.js'; diff --git a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts index 46c13f49bd9..eee22a3ee2a 100644 --- a/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/indexed_tree.ts @@ -1,8 +1,26 @@ -import { LeafData, SiblingPath } from '@aztec/types'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; +import { SiblingPath } from '@aztec/types'; -import { LowLeafWitnessData } from '../index.js'; import { AppendOnlyTree } from './append_only_tree.js'; +/** + * All of the data to be returned during batch insertion. + */ +export interface LowLeafWitnessData<N extends number> { + /** + * Preimage of the low nullifier that proves non-membership. + */ + leafPreimage: IndexedTreeLeafPreimage; + /** + * Sibling path to prove membership of low nullifier. + */ + siblingPath: SiblingPath<N>; + /** + * The index of low nullifier. + */ + index: bigint; +} + /** * The result of a batch insertion in an indexed merkle tree. */ @@ -35,27 +53,30 @@ export interface IndexedTree extends AppendOnlyTree { * @param includeUncommitted - If true, the uncommitted changes are included in the search. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousValue( + findIndexOfPreviousKey( newValue: bigint, includeUncommitted: boolean, - ): { - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }; + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + >; /** - * Gets the latest LeafData copy. - * @param index - Index of the leaf of which to obtain the LeafData copy. + * Gets the latest LeafPreimage copy. + * @param index - Index of the leaf of which to obtain the LeafPreimage copy. * @param includeUncommitted - If true, the uncommitted changes are included in the search. - * @returns A copy of the leaf data at the given index or undefined if the leaf was not found. + * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found.
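+ * @example + * // Editor's sketch of assumed usage: + * //   const preimage = await tree.getLatestLeafPreimageCopy(0n, true); + * //   const key = preimage?.getKey(); // bigint key of the leaf, if present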
*/ - getLatestLeafDataCopy(index: number, includeUncommitted: boolean): LeafData | undefined; + getLatestLeafPreimageCopy(index: bigint, includeUncommitted: boolean): Promise<IndexedTreeLeafPreimage | undefined>; /** * Batch insert multiple leaves into the tree. diff --git a/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts b/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts index e4f65b326a2..ba3ffb4309b 100644 --- a/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/merkle_tree.ts @@ -49,4 +49,12 @@ export interface MerkleTree extends SiblingPathSource { * @param includeUncommitted - Set to true to include uncommitted updates in the data set. */ getLeafValue(index: bigint, includeUncommitted: boolean): Promise<Buffer | undefined>; + + /** + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param leaf - The leaf value to look for. + * @param includeUncommitted - Indicates whether to include uncommitted data. + * @returns The index of the first leaf found with a given value (undefined if not found). + */ + findLeafIndex(leaf: Buffer, includeUncommitted: boolean): Promise<bigint | undefined>; } diff --git a/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts b/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts index 6bd5c024d0c..06ce3a24096 100644 --- a/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts +++ b/yarn-project/merkle-tree/src/interfaces/update_only_tree.ts @@ -1,5 +1,3 @@ -import { LeafData } from '@aztec/types'; - import { TreeSnapshotBuilder } from '../snapshots/snapshot_builder.js'; import { MerkleTree } from './merkle_tree.js'; @@ -12,6 +10,5 @@ export interface UpdateOnlyTree extends MerkleTree, TreeSnapshotBuilder { * @param leaf - The leaf value to be updated. * @param index - The leaf to be updated. */ - // TODO: Make this strictly a Buffer - updateLeaf(leaf: Buffer | LeafData, index: bigint): Promise<void>; + updateLeaf(leaf: Buffer, index: bigint): Promise<void>; } diff --git a/yarn-project/merkle-tree/src/load_tree.ts b/yarn-project/merkle-tree/src/load_tree.ts index baabe852735..9753a2d528d 100644 --- a/yarn-project/merkle-tree/src/load_tree.ts +++ b/yarn-project/merkle-tree/src/load_tree.ts @@ -13,14 +13,14 @@ import { TreeBase, decodeMeta } from './tree_base.js'; * @returns The newly created tree. */ export async function loadTree<T extends TreeBase>( - c: new (...args: any[]) => T, + c: new (db: LevelUp, hasher: Hasher, name: string, depth: number, size: bigint, root: Buffer) => T, db: LevelUp, hasher: Hasher, name: string, ): Promise<T> { const meta: Buffer = await db.get(name); const { root, depth, size } = decodeMeta(meta); + const tree = new c(db, hasher, name, depth, size, root); - await tree.initFromDb(); return tree; } diff --git a/yarn-project/merkle-tree/src/new_tree.ts b/yarn-project/merkle-tree/src/new_tree.ts index f1cd4c2d3b5..1395d012d25 100644 --- a/yarn-project/merkle-tree/src/new_tree.ts +++ b/yarn-project/merkle-tree/src/new_tree.ts @@ -15,14 +15,14 @@ import { TreeBase } from './tree_base.js'; * @returns The newly created tree.
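 * @example * // Editor's sketch of assumed usage with the narrowed constructor type (names from the tests below): * //   const db = levelup(createMemDown()); * //   const tree = await newTree(StandardTree, db, new Pedersen(), 'test', 32);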
*/ export async function newTree<T extends TreeBase>( - c: new (...args: any[]) => T, + c: new (db: LevelUp, hasher: Hasher, name: string, depth: number, size: bigint) => T, db: LevelUp, hasher: Hasher, name: string, depth: number, prefilledSize = 1, ): Promise<T> { - const tree = new c(db, hasher, name, depth, 0n, undefined); + const tree = new c(db, hasher, name, depth, 0n); await tree.init(prefilledSize); return tree; } diff --git a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts index b530e981b27..46361bd5913 100644 --- a/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/append_only_snapshot.ts @@ -229,4 +229,15 @@ class AppendOnlySnapshot implements TreeSnapshot { return undefined; } } + + async findLeafIndex(value: Buffer): Promise<bigint | undefined> { + const numLeaves = this.getNumLeaves(); + for (let i = 0n; i < numLeaves; i++) { + const currentValue = await this.getLeafValue(i); + if (currentValue && currentValue.equals(value)) { + return i; + } + } + return undefined; + } } diff --git a/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts index d77204beafa..b1157cf9d44 100644 --- a/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/base_full_snapshot.ts @@ -65,7 +65,7 @@ export abstract class BaseFullTreeSnapshotBuilder<T extends TreeBase, S extends TreeSnapshot> if (level + 1 > depth) { // short circuit if we've reached the leaf level // otherwise getNode might throw if we ask for the children of a leaf - this.handleLeaf(i, node, batch); + await this.handleLeaf(i, node, batch); continue; } @@ -98,7 +98,7 @@ export abstract class BaseFullTreeSnapshotBuilder<T extends TreeBase, S extends TreeSnapshot> { @@ -218,4 +218,15 @@ export class BaseFullTreeSnapshot implements TreeSnapshot { path.reverse(); return path; } + + async findLeafIndex(value: Buffer): Promise<bigint | undefined> { + const numLeaves = this.getNumLeaves(); + for (let i = 0n; i < numLeaves; i++) { + const currentValue = await this.getLeafValue(i); + if (currentValue && currentValue.equals(value)) { + return i; + } + } + return undefined; + } } diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts index 631531fcfc2..3846bbcc21d 100644 --- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts +++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.test.ts @@ -1,3 +1,6 @@ +import { Fr, NullifierLeaf, NullifierLeafPreimage } from '@aztec/circuits.js'; +import { Hasher } from '@aztec/types'; + import levelup, { LevelUp } from 'levelup'; import { Pedersen, newTree } from '../index.js'; @@ -6,6 +9,12 @@ import { createMemDown } from '../test/utils/create_mem_down.js'; import { IndexedTreeSnapshotBuilder } from './indexed_tree_snapshot.js'; import { describeSnapshotBuilderTestSuite } from './snapshot_builder_test_suite.js'; +class NullifierTree extends StandardIndexedTreeWithAppend { + constructor(db: levelup.LevelUp, hasher: Hasher, name: string, depth: number, size: bigint = 0n, root?: Buffer) { + super(db, hasher, name, depth, size, NullifierLeafPreimage, NullifierLeaf, root); + } +} + describe('IndexedTreeSnapshotBuilder', () => { let db: LevelUp; let tree: StandardIndexedTreeWithAppend; @@ -13,15 +22,15 @@ describe('IndexedTreeSnapshotBuilder', () => { beforeEach(async () => { db = levelup(createMemDown()); - tree = await newTree(StandardIndexedTreeWithAppend, db, new Pedersen(), 'test', 4); -
snapshotBuilder = new IndexedTreeSnapshotBuilder(db, tree); + tree = await newTree(NullifierTree, db, new Pedersen(), 'test', 4); + snapshotBuilder = new IndexedTreeSnapshotBuilder(db, tree, NullifierLeafPreimage); }); describeSnapshotBuilderTestSuite( () => tree, () => snapshotBuilder, async () => { - const newLeaves = Array.from({ length: 2 }).map(() => Buffer.from(Math.random().toString())); + const newLeaves = Array.from({ length: 2 }).map(() => new NullifierLeaf(Fr.random()).toBuffer()); await tree.appendLeaves(newLeaves); }, ); @@ -31,14 +40,14 @@ describe('IndexedTreeSnapshotBuilder', () => { await tree.appendLeaves([Buffer.from('a'), Buffer.from('b'), Buffer.from('c')]); await tree.commit(); const expectedLeavesAtBlock1 = await Promise.all([ - tree.getLatestLeafDataCopy(0, false), - tree.getLatestLeafDataCopy(1, false), - tree.getLatestLeafDataCopy(2, false), + tree.getLatestLeafPreimageCopy(0n, false), + tree.getLatestLeafPreimageCopy(1n, false), + tree.getLatestLeafPreimageCopy(2n, false), // I'd expect these to be undefined, but leaf 3 isn't? // must be some indexed-tree quirk I don't quite understand yet - tree.getLatestLeafDataCopy(3, false), - tree.getLatestLeafDataCopy(4, false), - tree.getLatestLeafDataCopy(5, false), + tree.getLatestLeafPreimageCopy(3n, false), + tree.getLatestLeafPreimageCopy(4n, false), + tree.getLatestLeafPreimageCopy(5n, false), ]); await snapshotBuilder.snapshot(1); @@ -46,35 +55,35 @@ describe('IndexedTreeSnapshotBuilder', () => { await tree.appendLeaves([Buffer.from('d'), Buffer.from('e'), Buffer.from('f')]); await tree.commit(); const expectedLeavesAtBlock2 = await Promise.all([ - tree.getLatestLeafDataCopy(0, false), - tree.getLatestLeafDataCopy(1, false), - tree.getLatestLeafDataCopy(2, false), - tree.getLatestLeafDataCopy(3, false), - tree.getLatestLeafDataCopy(4, false), - tree.getLatestLeafDataCopy(5, false), + tree.getLatestLeafPreimageCopy(0n, false), + tree.getLatestLeafPreimageCopy(1n, false), + tree.getLatestLeafPreimageCopy(2n, false), + tree.getLatestLeafPreimageCopy(3n, false), + tree.getLatestLeafPreimageCopy(4n, false), + tree.getLatestLeafPreimageCopy(5n, false), ]); await snapshotBuilder.snapshot(2); const snapshot1 = await snapshotBuilder.getSnapshot(1); const actualLeavesAtBlock1 = await Promise.all([ - snapshot1.getLatestLeafDataCopy(0n), - snapshot1.getLatestLeafDataCopy(1n), - snapshot1.getLatestLeafDataCopy(2n), - snapshot1.getLatestLeafDataCopy(3n), - snapshot1.getLatestLeafDataCopy(4n), - snapshot1.getLatestLeafDataCopy(5n), + snapshot1.getLatestLeafPreimageCopy(0n), + snapshot1.getLatestLeafPreimageCopy(1n), + snapshot1.getLatestLeafPreimageCopy(2n), + snapshot1.getLatestLeafPreimageCopy(3n), + snapshot1.getLatestLeafPreimageCopy(4n), + snapshot1.getLatestLeafPreimageCopy(5n), ]); expect(actualLeavesAtBlock1).toEqual(expectedLeavesAtBlock1); const snapshot2 = await snapshotBuilder.getSnapshot(2); const actualLeavesAtBlock2 = await Promise.all([ - snapshot2.getLatestLeafDataCopy(0n), - snapshot2.getLatestLeafDataCopy(1n), - snapshot2.getLatestLeafDataCopy(2n), - snapshot2.getLatestLeafDataCopy(3n), - snapshot2.getLatestLeafDataCopy(4n), - snapshot2.getLatestLeafDataCopy(5n), + snapshot2.getLatestLeafPreimageCopy(0n), + snapshot2.getLatestLeafPreimageCopy(1n), + snapshot2.getLatestLeafPreimageCopy(2n), + snapshot2.getLatestLeafPreimageCopy(3n), + snapshot2.getLatestLeafPreimageCopy(4n), + snapshot2.getLatestLeafPreimageCopy(5n), ]); expect(actualLeavesAtBlock2).toEqual(expectedLeavesAtBlock2); }); @@ -85,12 +94,12 @@
describe('IndexedTreeSnapshotBuilder', () => { await tree.appendLeaves([Buffer.from('a'), Buffer.from('f'), Buffer.from('d')]); await tree.commit(); const snapshot = await snapshotBuilder.snapshot(1); - const historicalPrevValue = tree.findIndexOfPreviousValue(2n, false); + const historicalPrevValue = await tree.findIndexOfPreviousKey(2n, false); await tree.appendLeaves([Buffer.from('c'), Buffer.from('b'), Buffer.from('e')]); await tree.commit(); - await expect(snapshot.findIndexOfPreviousValue(2n)).resolves.toEqual(historicalPrevValue); + await expect(snapshot.findIndexOfPreviousKey(2n)).resolves.toEqual(historicalPrevValue); }); }); }); diff --git a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts index 6725bd394e5..28aeefdc953 100644 --- a/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts +++ b/yarn-project/merkle-tree/src/snapshots/indexed_tree_snapshot.ts @@ -1,10 +1,9 @@ -import { toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { LeafData } from '@aztec/types'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { LevelUp, LevelUpChain } from 'levelup'; import { IndexedTree } from '../interfaces/indexed_tree.js'; -import { decodeTreeValue, encodeTreeValue } from '../standard_indexed_tree/standard_indexed_tree.js'; +import { PreimageFactory } from '../standard_indexed_tree/standard_indexed_tree.js'; import { TreeBase } from '../tree_base.js'; import { BaseFullTreeSnapshot, BaseFullTreeSnapshotBuilder } from './base_full_snapshot.js'; import { IndexedTreeSnapshot, TreeSnapshotBuilder } from './snapshot_builder.js'; @@ -17,44 +16,54 @@ export class IndexedTreeSnapshotBuilder extends BaseFullTreeSnapshotBuilder implements TreeSnapshotBuilder { - constructor(db: LevelUp, tree: IndexedTree & TreeBase) { + constructor(db: LevelUp, tree: IndexedTree & TreeBase, private leafPreimageBuilder: PreimageFactory) { super(db, tree); } protected openSnapshot(root: Buffer, numLeaves: bigint): IndexedTreeSnapshot { - return new IndexedTreeSnapshotImpl(this.db, root, numLeaves, this.tree); + return new IndexedTreeSnapshotImpl(this.db, root, numLeaves, this.tree, this.leafPreimageBuilder); } - protected handleLeaf(index: bigint, node: Buffer, batch: LevelUpChain) { - const leafData = this.tree.getLatestLeafDataCopy(Number(index), false); - if (leafData) { - batch.put(snapshotLeafValue(node, index), encodeTreeValue(leafData)); + protected async handleLeaf(index: bigint, node: Buffer, batch: LevelUpChain) { + const leafPreimage = await this.tree.getLatestLeafPreimageCopy(index, false); + if (leafPreimage) { + batch.put(snapshotLeafValue(node, index), leafPreimage.toBuffer()); } } } /** A snapshot of an indexed tree at a particular point in time */ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements IndexedTreeSnapshot { + constructor( + db: LevelUp, + historicRoot: Buffer, + numLeaves: bigint, + tree: IndexedTree & TreeBase, + private leafPreimageBuilder: PreimageFactory, + ) { + super(db, historicRoot, numLeaves, tree); + } + async getLeafValue(index: bigint): Promise { - const leafData = await this.getLatestLeafDataCopy(index); - return leafData ? 
toBufferBE(leafData.value, 32) : undefined; + const leafPreimage = await this.getLatestLeafPreimageCopy(index); + return leafPreimage?.toBuffer(); } - async getLatestLeafDataCopy(index: bigint): Promise<LeafData | undefined> { + async getLatestLeafPreimageCopy(index: bigint): Promise<IndexedTreeLeafPreimage | undefined> { const leafNode = await super.getLeafValue(index); const leafValue = await this.db.get(snapshotLeafValue(leafNode!, index)).catch(() => undefined); if (leafValue) { - return decodeTreeValue(leafValue); + return this.leafPreimageBuilder.fromBuffer(leafValue); } else { return undefined; } } - async findIndexOfPreviousValue(newValue: bigint): Promise<{ + async findIndexOfPreviousKey(newValue: bigint): Promise<{ /** * The index of the found leaf. */ - index: number; + index: bigint; /** * A flag indicating if the corresponding leaf's value is equal to `newValue`. */ @@ -65,18 +74,18 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements IndexedTre for (let i = 0; i < numLeaves; i++) { // this is very inefficient - const storedLeaf = await this.getLatestLeafDataCopy(BigInt(i))!; + const storedLeaf = await this.getLatestLeafPreimageCopy(BigInt(i))!; // The stored leaf can be undefined if it addresses an empty leaf // If the leaf is empty we do the same as if the leaf was larger if (storedLeaf === undefined) { diff.push(newValue); - } else if (storedLeaf.value > newValue) { + } else if (storedLeaf.getKey() > newValue) { diff.push(newValue); - } else if (storedLeaf.value === newValue) { - return { index: i, alreadyPresent: true }; + } else if (storedLeaf.getKey() === newValue) { + return { index: BigInt(i), alreadyPresent: true }; } else { - diff.push(newValue - storedLeaf.value); + diff.push(newValue - storedLeaf.getKey()); } } @@ -87,6 +96,13 @@ class IndexedTreeSnapshotImpl extends BaseFullTreeSnapshot implements IndexedTre } } - return { index: minIndex, alreadyPresent: false }; + return { index: BigInt(minIndex), alreadyPresent: false }; + } + + async findLeafIndex(value: Buffer): Promise<bigint | undefined> { + const index = await this.tree.findLeafIndex(value, false); + if (index !== undefined && index < this.getNumLeaves()) { + return index; + } } } diff --git a/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts b/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts index a6722306301..b1fd74f9bdc 100644 --- a/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts +++ b/yarn-project/merkle-tree/src/snapshots/snapshot_builder.ts @@ -1,4 +1,5 @@ -import { LeafData, SiblingPath } from '@aztec/types'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; +import { SiblingPath } from '@aztec/types'; /** * An interface for a tree that can record snapshots of its contents. @@ -47,6 +48,14 @@ export interface TreeSnapshot { * @param index - The index of the leaf for which a sibling path is required. */ getSiblingPath<N extends number>(index: bigint): Promise<SiblingPath<N>>; + + /** + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param value - The leaf value to look for. + * @returns The index of the first leaf found with a given value (undefined if not found).
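+ * @example + * // Editor's sketch (names assumed from the test suite below): + * //   const snapshot = await snapshotBuilder.getSnapshot(1); + * //   const index = await snapshot.findLeafIndex(leafBuffer); // bigint | undefined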
+ */ + findLeafIndex(value: Buffer): Promise<bigint | undefined>; } /** A snapshot of an indexed tree */ @@ -55,18 +64,18 @@ export interface IndexedTreeSnapshot extends TreeSnapshot { * Gets the historical data for a leaf * @param index - The index of the leaf to get the data for */ - getLatestLeafDataCopy(index: bigint): Promise<LeafData | undefined>; + getLatestLeafPreimageCopy(index: bigint): Promise<IndexedTreeLeafPreimage | undefined>; /** * Finds the index of the largest leaf whose value is less than or equal to the provided value. * @param newValue - The new value to be inserted into the tree. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousValue(newValue: bigint): Promise<{ + findIndexOfPreviousKey(newValue: bigint): Promise<{ /** * The index of the found leaf. */ - index: number; + index: bigint; /** * A flag indicating if the corresponding leaf's value is equal to `newValue`. */ diff --git a/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts b/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts index 3b66c36164c..f50ff1d69ae 100644 --- a/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts +++ b/yarn-project/merkle-tree/src/snapshots/snapshot_builder_test_suite.ts @@ -186,6 +186,7 @@ export function describeSnapshotBuilderTestSuite<T extends TreeBase, S extends TreeSnapshot> { + describe('findLeafIndex', () => { + it('returns the historical leaf index when the snapshot was taken', async () => { + await modifyTree(tree); + await tree.commit(); + const snapshot = await snapshotBuilder.snapshot(1); + + const initialLastLeafIndex = tree.getNumLeaves(false) - 1n; + let lastLeaf = await tree.getLeafValue(initialLastLeafIndex, false); + expect(await snapshot.findLeafIndex(lastLeaf!)).toBe(initialLastLeafIndex); + + await modifyTree(tree); + await tree.commit(); + + const newLastLeafIndex = tree.getNumLeaves(false) - 1n; + lastLeaf = await tree.getLeafValue(newLastLeafIndex, false); + + expect(await snapshot.findLeafIndex(lastLeaf!)).toBe(undefined); + }); + }); }); } diff --git a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts index 92cdc4152fc..138ca8f21e7 100644 --- a/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts +++ b/yarn-project/merkle-tree/src/sparse_tree/sparse_tree.ts @@ -41,4 +41,8 @@ export class SparseTree extends TreeBase implements UpdateOnlyTree { public getSnapshot(block: number): Promise<TreeSnapshot> { return this.#snapshotBuilder.getSnapshot(block); } + + public findLeafIndex(_value: Buffer, _includeUncommitted: boolean): Promise<bigint | undefined> { + throw new Error('Finding leaf index is not supported for sparse trees'); + } } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts index ebbf3a3d0ee..325b438a0f1 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/standard_indexed_tree.ts @@ -1,87 +1,107 @@ import { toBigIntBE, toBufferBE } from '@aztec/foundation/bigint-buffer'; -import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; -import { LeafData, SiblingPath } from '@aztec/types'; - -import { BatchInsertionResult, IndexedTree } from '../interfaces/indexed_tree.js'; -import { IndexedTreeSnapshotBuilder } from '../snapshots/indexed_tree_snapshot.js'; -import { IndexedTreeSnapshot } from '../snapshots/snapshot_builder.js'; +import { IndexedTreeLeaf, IndexedTreeLeafPreimage } from
'@aztec/foundation/trees'; +import { Hasher, SiblingPath } from '@aztec/types'; + +import { LevelUp } from 'levelup'; + +import { + BatchInsertionResult, + IndexedTree, + IndexedTreeSnapshot, + IndexedTreeSnapshotBuilder, + LowLeafWitnessData, +} from '../index.js'; import { TreeBase } from '../tree_base.js'; const log = createDebugLogger('aztec:standard-indexed-tree'); -const indexToKeyLeaf = (name: string, index: bigint) => { - return `${name}:leaf:${toBufferBE(index, 32).toString('hex')}`; -}; - -const keyLeafToIndex = (key: string): bigint => { - const index = key.split(':')[2]; - return toBigIntBE(Buffer.from(index, 'hex')); -}; - -const zeroLeaf: LeafData = { - value: 0n, - nextValue: 0n, - nextIndex: 0n, -}; - /** - * All of the data to be return during batch insertion. + * Factory for creating leaf preimages. */ -export interface LowLeafWitnessData<N extends number> { +export interface PreimageFactory { + /** + * Creates a new preimage from a leaf. + * @param leaf - Leaf to create a preimage from. + * @param nextKey - Next key of the leaf. + * @param nextIndex - Next index of the leaf. + */ + fromLeaf(leaf: IndexedTreeLeaf, nextKey: bigint, nextIndex: bigint): IndexedTreeLeafPreimage; + /** + * Creates a new preimage from a buffer. + * @param buffer - Buffer to create a preimage from. + */ + fromBuffer(buffer: Buffer): IndexedTreeLeafPreimage; /** - * Preimage of the low nullifier that proves non membership. + * Creates an empty preimage. */ - leafData: LeafData; + empty(): IndexedTreeLeafPreimage; /** - * Sibling path to prove membership of low nullifier. + * Creates a copy of a preimage. + * @param preimage - Preimage to be cloned. */ - siblingPath: SiblingPath<N>; + clone(preimage: IndexedTreeLeafPreimage): IndexedTreeLeafPreimage; +} + +/** + * Factory for creating leaves. + */ +export interface LeafFactory { + /** + * Creates a dummy leaf with a given key. + * @param key - Key of the leaf. + */ + buildDummy(key: bigint): IndexedTreeLeaf; /** - * The index of low nullifier. + * Creates a new leaf from a buffer. + * @param buffer - Buffer to create a leaf from. */ - index: bigint; + fromBuffer(buffer: Buffer): IndexedTreeLeaf; } +export const buildDbKeyForPreimage = (name: string, index: bigint) => { + return `${name}:leaf_by_index:${toBufferBE(index, 32).toString('hex')}`; +}; + +export const buildDbKeyForLeafIndex = (name: string, key: bigint) => { + return `${name}:leaf_index_by_leaf_key:${toBufferBE(key, 32).toString('hex')}`; +}; + /** * Pre-compute empty witness. * @param treeHeight - Height of tree for sibling path. * @returns An empty witness.
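 * @example * // Editor's illustration of the returned shape, derived from the implementation below: * //   const witness = getEmptyLowLeafWitness(32, leafPreimageFactory); * //   // witness.index === 0n; empty preimage; sibling path of 32 zero buffers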
*/ -function getEmptyLowLeafWitness<N extends number>(treeHeight: N): LowLeafWitnessData<N> { +function getEmptyLowLeafWitness<N extends number>( + treeHeight: N, + leafPreimageFactory: PreimageFactory, +): LowLeafWitnessData<N> { return { - leafData: zeroLeaf, + leafPreimage: leafPreimageFactory.empty(), index: 0n, siblingPath: new SiblingPath(treeHeight, Array(treeHeight).fill(toBufferBE(0n, 32))), }; } -export const encodeTreeValue = (leafData: LeafData) => { - const valueAsBuffer = toBufferBE(leafData.value, 32); - const indexAsBuffer = toBufferBE(leafData.nextIndex, 32); - const nextValueAsBuffer = toBufferBE(leafData.nextValue, 32); - return Buffer.concat([valueAsBuffer, indexAsBuffer, nextValueAsBuffer]); -}; - -export const decodeTreeValue = (buf: Buffer) => { - const value = toBigIntBE(buf.subarray(0, 32)); - const nextIndex = toBigIntBE(buf.subarray(32, 64)); - const nextValue = toBigIntBE(buf.subarray(64, 96)); - return { - value, - nextIndex, - nextValue, - } as LeafData; -}; - /** - * Indexed merkle tree. + * Standard implementation of an indexed tree. */ export class StandardIndexedTree extends TreeBase implements IndexedTree { - #snapshotBuilder = new IndexedTreeSnapshotBuilder(this.db, this); - - protected leaves: LeafData[] = []; - protected cachedLeaves: { [key: number]: LeafData } = {}; + #snapshotBuilder = new IndexedTreeSnapshotBuilder(this.db, this, this.leafPreimageFactory); + protected cachedLeafPreimages: { [key: string]: IndexedTreeLeafPreimage } = {}; + + public constructor( + db: LevelUp, + hasher: Hasher, + name: string, + depth: number, + size: bigint = 0n, + protected leafPreimageFactory: PreimageFactory, + protected leafFactory: LeafFactory, + root?: Buffer, + ) { + super(db, hasher, name, depth, size, root); + } /** * Appends the given leaves to the tree. @@ -89,7 +109,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * @returns Empty promise. * @remarks Use batchInsert method instead. */ - public appendLeaves(_leaves: Buffer[]): Promise<void> { + appendLeaves(_leaves: Buffer[]): Promise<void> { throw new Error('Not implemented'); } @@ -117,88 +137,149 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * @param includeUncommitted - Indicates whether to include uncommitted leaves in the computation. * @returns The value of the leaf at the given index or undefined if the leaf is empty. */ - public getLeafValue(index: bigint, includeUncommitted: boolean): Promise<Buffer | undefined> { - const leaf = this.getLatestLeafDataCopy(Number(index), includeUncommitted); - if (!leaf) { - return Promise.resolve(undefined); - } - return Promise.resolve(toBufferBE(leaf.value, 32)); + public async getLeafValue(index: bigint, includeUncommitted: boolean): Promise<Buffer | undefined> { + const preimage = await this.getLatestLeafPreimageCopy(index, includeUncommitted); + return preimage && preimage.toBuffer(); } /** * Finds the index of the largest leaf whose value is less than or equal to the provided value. - * @param newValue - The new value to be inserted into the tree. + * @param newKey - The new key to be inserted into the tree. * @param includeUncommitted - If true, the uncommitted changes are included in the search. * @returns The found leaf index and a flag indicating if the corresponding leaf's value is equal to `newValue`. */ - findIndexOfPreviousValue( - newValue: bigint, + async findIndexOfPreviousKey( + newKey: bigint, includeUncommitted: boolean, - ): { - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`.
- */ - alreadyPresent: boolean; - } { - const numLeaves = this.getNumLeaves(includeUncommitted); - const diff: bigint[] = []; - - for (let i = 0; i < numLeaves; i++) { - const storedLeaf = this.getLatestLeafDataCopy(i, includeUncommitted)!; - - // The stored leaf can be undefined if it addresses an empty leaf - // If the leaf is empty we do the same as if the leaf was larger - if (storedLeaf === undefined) { - diff.push(newValue); - } else if (storedLeaf.value > newValue) { - diff.push(newValue); - } else if (storedLeaf.value === newValue) { - return { index: i, alreadyPresent: true }; - } else { - diff.push(newValue - storedLeaf.value); + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { + let lowLeafIndex = await this.getDbLowLeafIndex(newKey); + let lowLeafPreimage = lowLeafIndex !== undefined ? await this.getDbPreimage(lowLeafIndex) : undefined; + + if (includeUncommitted) { + const cachedLowLeafIndex = this.getCachedLowLeafIndex(newKey); + if (cachedLowLeafIndex !== undefined) { + const cachedLowLeafPreimage = this.getCachedPreimage(cachedLowLeafIndex)!; + if (!lowLeafPreimage || cachedLowLeafPreimage.getKey() > lowLeafPreimage.getKey()) { + lowLeafIndex = cachedLowLeafIndex; + lowLeafPreimage = cachedLowLeafPreimage; + } } } - const minIndex = this.findMinIndex(diff); - return { index: minIndex, alreadyPresent: false }; + + if (lowLeafIndex === undefined || !lowLeafPreimage) { + return undefined; + } + + return { + index: lowLeafIndex, + alreadyPresent: lowLeafPreimage.getKey() === newKey, + }; + } + + private getCachedLowLeafIndex(key: bigint): bigint | undefined { + const indexes = Object.getOwnPropertyNames(this.cachedLeafPreimages); + const lowLeafIndexes = indexes + .map(index => ({ + index: BigInt(index), + key: this.cachedLeafPreimages[index].getKey(), + })) + .filter(({ key: candidateKey }) => candidateKey <= key) + .sort((a, b) => Number(b.key - a.key)); + return lowLeafIndexes[0]?.index; + } + + private getCachedLeafIndex(key: bigint): bigint | undefined { + const index = Object.keys(this.cachedLeafPreimages).find(index => { + return this.cachedLeafPreimages[index].getKey() === key; + }); + if (index) { + return BigInt(index); + } + return undefined; + } + + private async getDbLowLeafIndex(key: bigint): Promise<bigint | undefined> { + return await new Promise<bigint | undefined>((resolve, reject) => { + let lowLeafIndex: bigint | undefined; + this.db + .createReadStream({ + gte: buildDbKeyForLeafIndex(this.getName(), 0n), + lte: buildDbKeyForLeafIndex(this.getName(), key), + limit: 1, + reverse: true, + }) + .on('data', data => { + lowLeafIndex = toBigIntBE(data.value); + }) + .on('close', function () {}) + .on('end', function () { + resolve(lowLeafIndex); + }) + .on('error', function () { + log.error('stream error'); + reject(); + }); + }); + } + + private async getDbPreimage(index: bigint): Promise<IndexedTreeLeafPreimage | undefined> { + const dbPreimage = await this.db + .get(buildDbKeyForPreimage(this.getName(), index)) + .then(data => this.leafPreimageFactory.fromBuffer(data)) + .catch(() => undefined); + return dbPreimage; + } + + private getCachedPreimage(index: bigint): IndexedTreeLeafPreimage | undefined { + return this.cachedLeafPreimages[index.toString()]; } /** - * Gets the latest LeafData copy. - * @param index - Index of the leaf of which to obtain the LeafData copy. + * Gets the latest LeafPreimage copy.
+ * @param index - Index of the leaf of which to obtain the LeafPreimage copy. * @param includeUncommitted - If true, the uncommitted changes are included in the search. - * @returns A copy of the leaf data at the given index or undefined if the leaf was not found. + * @returns A copy of the leaf preimage at the given index or undefined if the leaf was not found. */ - public getLatestLeafDataCopy(index: number, includeUncommitted: boolean): LeafData | undefined { - const leaf = !includeUncommitted ? this.leaves[index] : this.cachedLeaves[index] ?? this.leaves[index]; - return leaf - ? ({ - value: leaf.value, - nextIndex: leaf.nextIndex, - nextValue: leaf.nextValue, - } as LeafData) - : undefined; + public async getLatestLeafPreimageCopy( + index: bigint, + includeUncommitted: boolean, + ): Promise<IndexedTreeLeafPreimage | undefined> { + const preimage = !includeUncommitted + ? await this.getDbPreimage(index) + : this.getCachedPreimage(index) ?? (await this.getDbPreimage(index)); + return preimage && this.leafPreimageFactory.clone(preimage); } /** - * Finds the index of the minimum value in an array. - * @param values - The collection of values to be searched. - * @returns The index of the minimum value in the array. + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param value - The leaf value to look for. + * @param includeUncommitted - Indicates whether to include uncommitted data. + * @returns The index of the first leaf found with a given value (undefined if not found). */ - private findMinIndex(values: bigint[]) { - if (!values.length) { - return 0; + public async findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise<bigint | undefined> { + const leaf = this.leafFactory.fromBuffer(value); + let index = await this.db + .get(buildDbKeyForLeafIndex(this.getName(), leaf.getKey())) + .then(data => toBigIntBE(data)) + .catch(() => undefined); + + if (includeUncommitted && index === undefined) { + const cachedIndex = this.getCachedLeafIndex(leaf.getKey()); + index = cachedIndex; } - let minIndex = 0; - for (let i = 1; i < values.length; i++) { - if (values[minIndex] > values[i]) { - minIndex = i; - } - } - return minIndex; + return index; } /** @@ -220,66 +301,31 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { throw new Error(`Prefilled size must be at least 1!`); } - const leaves: LeafData[] = []; + const leaves: IndexedTreeLeafPreimage[] = []; for (let i = 0n; i < prefilledSize; i++) { - const newLeaf = { - value: toBigIntBE(Buffer.from([Number(i)])), - nextIndex: i + 1n, - nextValue: i + 1n, - }; - leaves.push(newLeaf); + const newLeaf = this.leafFactory.buildDummy(i); + const newLeafPreimage = this.leafPreimageFactory.fromLeaf(newLeaf, i + 1n, i + 1n); + leaves.push(newLeafPreimage); } - // Make the first leaf have 0 value - leaves[0].value = 0n; - // Make the last leaf point to the first leaf - leaves[prefilledSize - 1].nextIndex = 0n; - leaves[prefilledSize - 1].nextValue = 0n; + leaves[prefilledSize - 1] = this.leafPreimageFactory.fromLeaf(leaves[prefilledSize - 1].asLeaf(), 0n, 0n); await this.encodeAndAppendLeaves(leaves, true); await this.commit(); } - /** - * Loads Merkle tree data from a database and assigns them to this object.
- */ - public async initFromDb(): Promise { - const startingIndex = 0n; - const values: LeafData[] = []; - const promise = new Promise((resolve, reject) => { - this.db - .createReadStream({ - gte: indexToKeyLeaf(this.getName(), startingIndex), - lte: indexToKeyLeaf(this.getName(), 2n ** BigInt(this.getDepth())), - }) - .on('data', function (data) { - const index = keyLeafToIndex(data.key.toString('utf-8')); - values[Number(index)] = decodeTreeValue(data.value); - }) - .on('close', function () {}) - .on('end', function () { - resolve(); - }) - .on('error', function () { - log.error('stream error'); - reject(); - }); - }); - await promise; - this.leaves = values; - } - /** * Commits all the leaves to the database and removes them from a cache. */ private async commitLeaves(): Promise { const batch = this.db.batch(); - const keys = Object.getOwnPropertyNames(this.cachedLeaves); + const keys = Object.getOwnPropertyNames(this.cachedLeafPreimages); for (const key of keys) { - const index = Number(key); - batch.put(indexToKeyLeaf(this.getName(), BigInt(index)), encodeTreeValue(this.cachedLeaves[index])); - this.leaves[index] = this.cachedLeaves[index]; + const leaf = this.cachedLeafPreimages[key]; + const index = BigInt(key); + batch.put(buildDbKeyForPreimage(this.getName(), index), leaf.toBuffer()); + batch.put(buildDbKeyForLeafIndex(this.getName(), leaf.getKey()), toBufferBE(index, 32)); } await batch.write(); this.clearCachedLeaves(); @@ -289,20 +335,21 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * Clears the cache. */ private clearCachedLeaves() { - this.cachedLeaves = {}; + this.cachedLeafPreimages = {}; } /** * Updates a leaf in the tree. - * @param leaf - New contents of the leaf. + * @param preimage - New contents of the leaf. * @param index - Index of the leaf to be updated. */ - protected async updateLeaf(leaf: LeafData, index: bigint) { + protected async updateLeaf(preimage: IndexedTreeLeafPreimage, index: bigint) { if (index > this.maxIndex) { throw Error(`Index out of bounds. 
Index ${index}, max index: ${this.maxIndex}.`); } - const encodedLeaf = this.encodeLeaf(leaf, true); + this.cachedLeafPreimages[index.toString()] = preimage; + const encodedLeaf = this.encodeLeaf(preimage, true); await this.addLeafToCacheAndHashToRoot(encodedLeaf, index); const numLeaves = this.getNumLeaves(true); if (index >= numLeaves) { @@ -426,45 +473,45 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { public async batchInsert<TreeHeight extends number, SubtreeHeight extends number, SubtreeSiblingPathHeight extends number>( leaves: Buffer[], subtreeHeight: SubtreeHeight, ): Promise<BatchInsertionResult<TreeHeight, SubtreeSiblingPathHeight>> { - const emptyLowLeafWitness = getEmptyLowLeafWitness(this.getDepth() as TreeHeight); + const emptyLowLeafWitness = getEmptyLowLeafWitness(this.getDepth() as TreeHeight, this.leafPreimageFactory); // Accumulators const lowLeavesWitnesses: LowLeafWitnessData<TreeHeight>[] = leaves.map(() => emptyLowLeafWitness); - const pendingInsertionSubtree: LeafData[] = leaves.map(() => zeroLeaf); + const pendingInsertionSubtree: IndexedTreeLeafPreimage[] = leaves.map(() => this.leafPreimageFactory.empty()); // Start info const startInsertionIndex = this.getNumLeaves(true); - const leavesToInsert = leaves.map(leaf => toBigIntBE(leaf)); + const leavesToInsert = leaves.map(leaf => this.leafFactory.fromBuffer(leaf)); const sortedDescendingLeafTuples = leavesToInsert .map((leaf, index) => ({ leaf, index })) - .sort((a, b) => Number(b.leaf - a.leaf)); + .sort((a, b) => Number(b.leaf.getKey() - a.leaf.getKey())); const sortedDescendingLeaves = sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf); // Get insertion path for each leaf for (let i = 0; i < leavesToInsert.length; i++) { - const newValue = sortedDescendingLeaves[i]; - const originalIndex = leavesToInsert.indexOf(newValue); + const newLeaf = sortedDescendingLeaves[i]; + const originalIndex = leavesToInsert.indexOf(newLeaf); - if (newValue === 0n) { + if (newLeaf.isEmpty()) { continue; } - const indexOfPrevious = this.findIndexOfPreviousValue(newValue, true); - - // get the low leaf - const lowLeaf = this.getLatestLeafDataCopy(indexOfPrevious.index, true); - if (lowLeaf === undefined) { + const indexOfPrevious = await this.findIndexOfPreviousKey(newLeaf.getKey(), true); + if (indexOfPrevious === undefined) { return { lowLeavesWitnessData: undefined, - sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => new Fr(leafTuple.leaf).toBuffer()), + sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf.toBuffer()), sortedNewLeavesIndexes: sortedDescendingLeafTuples.map(leafTuple => leafTuple.index), newSubtreeSiblingPath: await this.getSubtreeSiblingPath(subtreeHeight, true), }; } + + // get the low leaf (existence checked in getting index) + const lowLeafPreimage = (await this.getLatestLeafPreimageCopy(indexOfPrevious.index, true))!; const siblingPath = await this.getSiblingPath(BigInt(indexOfPrevious.index), true); const witness: LowLeafWitnessData<TreeHeight> = { - leafData: { ...lowLeaf }, + leafPreimage: lowLeafPreimage, index: BigInt(indexOfPrevious.index), siblingPath, }; @@ -472,20 +519,23 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { // Update the running paths lowLeavesWitnesses[i] = witness; - const currentPendingLeaf: LeafData = { - value: newValue, - nextValue: lowLeaf.nextValue, - nextIndex: lowLeaf.nextIndex, - }; + const currentPendingPreimageLeaf = this.leafPreimageFactory.fromLeaf( + newLeaf, + lowLeafPreimage.getNextKey(), + lowLeafPreimage.getNextIndex(), + ); - pendingInsertionSubtree[originalIndex] = currentPendingLeaf; + pendingInsertionSubtree[originalIndex] = currentPendingPreimageLeaf; -
lowLeaf.nextValue = newValue; - lowLeaf.nextIndex = startInsertionIndex + BigInt(originalIndex); + const newLowLeafPreimage = this.leafPreimageFactory.fromLeaf( + lowLeafPreimage.asLeaf(), + newLeaf.getKey(), + startInsertionIndex + BigInt(originalIndex), + ); const lowLeafIndex = indexOfPrevious.index; - this.cachedLeaves[lowLeafIndex] = lowLeaf; - await this.updateLeaf(lowLeaf, BigInt(lowLeafIndex)); + this.cachedLeafPreimages[lowLeafIndex.toString()] = newLowLeafPreimage; + await this.updateLeaf(newLowLeafPreimage, lowLeafIndex); } const newSubtreeSiblingPath = await this.getSubtreeSiblingPath( @@ -500,7 +550,7 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { return { lowLeavesWitnessData: lowLeavesWitnesses, - sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => Buffer.from(new Fr(leafTuple.leaf).toBuffer())), + sortedNewLeaves: sortedDescendingLeafTuples.map(leafTuple => leafTuple.leaf.toBuffer()), sortedNewLeavesIndexes: sortedDescendingLeafTuples.map(leafTuple => leafTuple.index), newSubtreeSiblingPath, }; @@ -527,19 +577,19 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { /** * Encodes leaves and appends them to a tree. - * @param leaves - Leaves to encode. + * @param preimages - Leaves to encode. * @param hash0Leaf - Indicates whether 0 value leaf should be hashed. See {@link encodeLeaf}. * @returns Empty promise */ - private async encodeAndAppendLeaves(leaves: LeafData[], hash0Leaf: boolean): Promise { - const startInsertionIndex = Number(this.getNumLeaves(true)); + private async encodeAndAppendLeaves(preimages: IndexedTreeLeafPreimage[], hash0Leaf: boolean): Promise { + const startInsertionIndex = this.getNumLeaves(true); - const serializedLeaves = leaves.map((leaf, i) => { - this.cachedLeaves[startInsertionIndex + i] = leaf; - return this.encodeLeaf(leaf, hash0Leaf); + const hashedLeaves = preimages.map((preimage, i) => { + this.cachedLeafPreimages[(startInsertionIndex + BigInt(i)).toString()] = preimage; + return this.encodeLeaf(preimage, hash0Leaf); }); - await super.appendLeaves(serializedLeaves); + await super.appendLeaves(hashedLeaves); } /** @@ -550,14 +600,12 @@ export class StandardIndexedTree extends TreeBase implements IndexedTree { * nullifier it is improbable that a valid nullifier would be 0. * @returns Leaf encoded in a buffer. 
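 * @example * // Editor's illustration of the hash0Leaf flag, derived from the logic below: * //   encodeLeaf(emptyPreimage, false) // 32 zero bytes, the leaf is not hashed * //   encodeLeaf(preimage, true)       // hasher.hashInputs(preimage.toHashInputs())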
*/ - private encodeLeaf(leaf: LeafData, hash0Leaf: boolean): Buffer { + private encodeLeaf(leaf: IndexedTreeLeafPreimage, hash0Leaf: boolean): Buffer { let encodedLeaf; - if (!hash0Leaf && leaf.value == 0n) { + if (!hash0Leaf && leaf.getKey() == 0n) { encodedLeaf = toBufferBE(0n, 32); } else { - encodedLeaf = this.hasher.hashInputs( - [leaf.value, leaf.nextIndex, leaf.nextValue].map(val => toBufferBE(val, 32)), - ); + encodedLeaf = this.hasher.hashInputs(leaf.toHashInputs()); } return encodedLeaf; } diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts index 2f6db0b1ac1..9ebc8c30472 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree.test.ts @@ -1,3 +1,4 @@ +import { Fr, NullifierLeaf, NullifierLeafPreimage } from '@aztec/circuits.js'; import { toBufferBE } from '@aztec/foundation/bigint-buffer'; import { Hasher, SiblingPath } from '@aztec/types'; @@ -8,16 +9,22 @@ import { treeTestSuite } from '../../test/test_suite.js'; import { createMemDown } from '../../test/utils/create_mem_down.js'; import { StandardIndexedTreeWithAppend } from './standard_indexed_tree_with_append.js'; +class NullifierTree extends StandardIndexedTreeWithAppend { + constructor(db: levelup.LevelUp, hasher: Hasher, name: string, depth: number, size: bigint = 0n, root?: Buffer) { + super(db, hasher, name, depth, size, NullifierLeafPreimage, NullifierLeaf, root); + } +} + const createDb = async (levelUp: levelup.LevelUp, hasher: Hasher, name: string, depth: number, prefilledSize = 1) => { - return await newTree(StandardIndexedTreeWithAppend, levelUp, hasher, name, depth, prefilledSize); + return await newTree(NullifierTree, levelUp, hasher, name, depth, prefilledSize); }; const createFromName = async (levelUp: levelup.LevelUp, hasher: Hasher, name: string) => { - return await loadTree(StandardIndexedTreeWithAppend, levelUp, hasher, name); + return await loadTree(NullifierTree, levelUp, hasher, name); }; -const createIndexedTreeLeaf = (value: number, nextIndex: number, nextValue: number) => { - return [toBufferBE(BigInt(value), 32), toBufferBE(BigInt(nextIndex), 32), toBufferBE(BigInt(nextValue), 32)]; +const createIndexedTreeLeafHashInputs = (value: number, nextIndex: number, nextValue: number) => { + return new NullifierLeafPreimage(new Fr(value), new Fr(nextValue), BigInt(nextIndex)).toHashInputs(); }; const verifyCommittedState = async ( @@ -57,7 +64,7 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 0 0 0 0 0 0 0 0. */ - const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeaf(0, 0, 0)); + const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 0, 0)); const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); @@ -91,8 +98,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. 
*/ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 1, 30)); - let index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 0, 0)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 1, 30)); + let index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, level1ZeroHash); root = pedersen.hash(e20, level2ZeroHash); @@ -118,8 +125,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 2, 10)); - let index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 1, 30)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 2, 10)); + let index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 1, 30)); e10 = pedersen.hash(index0Hash, index1Hash); let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); e20 = pedersen.hash(e10, e11); @@ -151,8 +158,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 10 0 20 30 0 0 0 0. */ e10 = pedersen.hash(index0Hash, index1Hash); - index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 3, 20)); - const index3Hash = pedersen.hashInputs(createIndexedTreeLeaf(20, 1, 30)); + index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 3, 20)); + const index3Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(20, 1, 30)); e11 = pedersen.hash(index2Hash, index3Hash); e20 = pedersen.hash(e10, e11); root = pedersen.hash(e20, level2ZeroHash); @@ -182,8 +189,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 4 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. */ - index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 4, 50)); - const index4Hash = pedersen.hashInputs(createIndexedTreeLeaf(50, 0, 0)); + index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 4, 50)); + const index4Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(50, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, e11); const e12 = pedersen.hash(index4Hash, INITIAL_LEAF); @@ -255,7 +262,7 @@ describe('StandardIndexedTreeSpecific', () => { */ const INITIAL_LEAF = toBufferBE(0n, 32); - const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeaf(0, 0, 0)); + const initialLeafHash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 0, 0)); const level1ZeroHash = pedersen.hash(INITIAL_LEAF, INITIAL_LEAF); const level2ZeroHash = pedersen.hash(level1ZeroHash, level1ZeroHash); let index0Hash = initialLeafHash; @@ -289,8 +296,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 1 0 0 0 0 0 0 0 * nextVal 30 0 0 0 0 0 0 0. */ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 1, 30)); - let index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 0, 0)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 1, 30)); + let index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, level1ZeroHash); root = pedersen.hash(e20, level2ZeroHash); @@ -315,8 +322,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 0 1 0 0 0 0 0 * nextVal 10 0 30 0 0 0 0 0. 
*/ - index0Hash = pedersen.hashInputs(createIndexedTreeLeaf(0, 2, 10)); - let index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 1, 30)); + index0Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(0, 2, 10)); + let index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 1, 30)); e10 = pedersen.hash(index0Hash, index1Hash); let e11 = pedersen.hash(index2Hash, INITIAL_LEAF); e20 = pedersen.hash(e10, e11); @@ -348,8 +355,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextVal 10 0 20 30 0 0 0 0. */ e10 = pedersen.hash(index0Hash, index1Hash); - index2Hash = pedersen.hashInputs(createIndexedTreeLeaf(10, 3, 20)); - const index3Hash = pedersen.hashInputs(createIndexedTreeLeaf(20, 1, 30)); + index2Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(10, 3, 20)); + const index3Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(20, 1, 30)); e11 = pedersen.hash(index2Hash, index3Hash); e20 = pedersen.hash(e10, e11); root = pedersen.hash(e20, level2ZeroHash); @@ -387,8 +394,8 @@ describe('StandardIndexedTreeSpecific', () => { * nextIdx 2 6 3 1 0 0 0 0 * nextVal 10 50 20 30 0 0 0 0. */ - index1Hash = pedersen.hashInputs(createIndexedTreeLeaf(30, 6, 50)); - const index6Hash = pedersen.hashInputs(createIndexedTreeLeaf(50, 0, 0)); + index1Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(30, 6, 50)); + const index6Hash = pedersen.hashInputs(createIndexedTreeLeafHashInputs(50, 0, 0)); e10 = pedersen.hash(index0Hash, index1Hash); e20 = pedersen.hash(e10, e11); const e13 = pedersen.hash(index6Hash, INITIAL_LEAF); @@ -469,4 +476,20 @@ describe('StandardIndexedTreeSpecific', () => { const actualRoot = insertTree.getRoot(true); expect(actualRoot).toEqual(expectedRoot); }); + + it('should be able to find indexes of leaves', async () => { + const db = levelup(createMemDown()); + const tree = await createDb(db, pedersen, 'test', 3); + const values = [Buffer.alloc(32, 1), Buffer.alloc(32, 2)]; + + await tree.appendLeaves([values[0]]); + + expect(await tree.findLeafIndex(values[0], true)).toBeDefined(); + expect(await tree.findLeafIndex(values[0], false)).toBe(undefined); + expect(await tree.findLeafIndex(values[1], true)).toBe(undefined); + + await tree.commit(); + + expect(await tree.findLeafIndex(values[0], false)).toBeDefined(); + }); }); diff --git a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts index 49a90e611f1..990f4e6ef5f 100644 --- a/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts +++ b/yarn-project/merkle-tree/src/standard_indexed_tree/test/standard_indexed_tree_with_append.ts @@ -1,6 +1,3 @@ -import { toBigIntBE } from '@aztec/foundation/bigint-buffer'; -import { LeafData } from '@aztec/types'; - import { StandardIndexedTree } from '../../index.js'; /** @@ -27,10 +24,10 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { * @returns Empty promise. */ private async appendLeaf(leaf: Buffer): Promise { - const newValue = toBigIntBE(leaf); + const newLeaf = this.leafFactory.fromBuffer(leaf); // Special case when appending zero - if (newValue === 0n) { + if (newLeaf.getKey() === 0n) { const newSize = (this.cachedSize ?? this.size) + 1n; if (newSize - 1n > this.maxIndex) { throw Error(`Can't append beyond max index. 
Max index: ${this.maxIndex}`); @@ -39,27 +36,31 @@ export class StandardIndexedTreeWithAppend extends StandardIndexedTree { return; } - const indexOfPrevious = this.findIndexOfPreviousValue(newValue, true); - const previousLeafCopy = this.getLatestLeafDataCopy(indexOfPrevious.index, true); - - if (previousLeafCopy === undefined) { + const lowLeafIndex = await this.findIndexOfPreviousKey(newLeaf.getKey(), true); + if (lowLeafIndex === undefined) { throw new Error(`Previous leaf not found!`); } - const newLeaf = { - value: newValue, - nextIndex: previousLeafCopy.nextIndex, - nextValue: previousLeafCopy.nextValue, - } as LeafData; - if (indexOfPrevious.alreadyPresent) { + const lowLeafPreimage = (await this.getLatestLeafPreimageCopy(lowLeafIndex.index, true))!; + + const newLeafPreimage = this.leafPreimageFactory.fromLeaf( + newLeaf, + lowLeafPreimage.getNextKey(), + lowLeafPreimage.getNextIndex(), + ); + + if (lowLeafIndex.alreadyPresent) { return; } // insert a new leaf at the highest index and update the values of our previous leaf copy const currentSize = this.getNumLeaves(true); - previousLeafCopy.nextIndex = BigInt(currentSize); - previousLeafCopy.nextValue = newLeaf.value; - this.cachedLeaves[Number(currentSize)] = newLeaf; - this.cachedLeaves[Number(indexOfPrevious.index)] = previousLeafCopy; - await this.updateLeaf(previousLeafCopy, BigInt(indexOfPrevious.index)); - await this.updateLeaf(newLeaf, this.getNumLeaves(true)); + const newLowLeafPreimage = this.leafPreimageFactory.fromLeaf( + lowLeafPreimage.asLeaf(), + newLeaf.getKey(), + BigInt(currentSize), + ); + this.cachedLeafPreimages[Number(currentSize)] = newLeafPreimage; + this.cachedLeafPreimages[Number(lowLeafIndex.index)] = newLowLeafPreimage; + await this.updateLeaf(newLowLeafPreimage, BigInt(lowLeafIndex.index)); + await this.updateLeaf(newLeafPreimage, this.getNumLeaves(true)); } } diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts index ee3191f42ff..b211017d851 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.test.ts @@ -69,4 +69,20 @@ describe('StandardTree_batchAppend', () => { expect(tree.getRoot(true)).toEqual(root); }); + + it('should be able to find indexes of leaves', async () => { + const db = levelup(createMemDown()); + const tree = await createDb(db, pedersen, 'test', 3); + const values = [Buffer.alloc(32, 1), Buffer.alloc(32, 2)]; + + await tree.appendLeaves([values[0]]); + + expect(await tree.findLeafIndex(values[0], true)).toBeDefined(); + expect(await tree.findLeafIndex(values[0], false)).toBe(undefined); + expect(await tree.findLeafIndex(values[1], true)).toBe(undefined); + + await tree.commit(); + + expect(await tree.findLeafIndex(values[0], false)).toBeDefined(); + }); }); diff --git a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts index 0b92572a4b8..55b4f532469 100644 --- a/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts +++ b/yarn-project/merkle-tree/src/standard_tree/standard_tree.ts @@ -24,4 +24,14 @@ export class StandardTree extends TreeBase implements AppendOnlyTree { public getSnapshot(block: number): Promise { return this.#snapshotBuilder.getSnapshot(block); } + + public async findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise { + for (let i = 0n; i < this.getNumLeaves(includeUncommitted); i++) { + const 
currentValue = await this.getLeafValue(i, includeUncommitted); + if (currentValue && currentValue.equals(value)) { + return i; + } + } + return undefined; + } } diff --git a/yarn-project/merkle-tree/src/tree_base.ts b/yarn-project/merkle-tree/src/tree_base.ts index c57a0499171..291ac258082 100644 --- a/yarn-project/merkle-tree/src/tree_base.ts +++ b/yarn-project/merkle-tree/src/tree_base.ts @@ -241,13 +241,6 @@ export abstract class TreeBase implements MerkleTree { await this.writeMeta(); } - /** - * Initializes the tree from the database. - */ - public async initFromDb(): Promise { - // Implemented only by Indexed Tree to populate the leaf cache. - } - /** * Writes meta data to the provided batch. * @param batch - The batch to which to write the meta data. @@ -307,4 +300,12 @@ export abstract class TreeBase implements MerkleTree { } this.cachedSize = numLeaves + BigInt(leaves.length); } + + /** + * Returns the index of a leaf given its value, or undefined if no leaf with that value is found. + * @param value - The leaf value to look for. + * @param includeUncommitted - Indicates whether to include uncommitted data. + * @returns The index of the first leaf found with a given value (undefined if not found). + */ + abstract findLeafIndex(value: Buffer, includeUncommitted: boolean): Promise; } diff --git a/yarn-project/merkle-tree/tsconfig.json b/yarn-project/merkle-tree/tsconfig.json index 831130c7c84..35f81f8b801 100644 --- a/yarn-project/merkle-tree/tsconfig.json +++ b/yarn-project/merkle-tree/tsconfig.json @@ -11,6 +11,9 @@ }, { "path": "../types" + }, + { + "path": "../circuits.js" } ], "include": ["src"] diff --git a/yarn-project/noir-compiler/package.json b/yarn-project/noir-compiler/package.json index 2232e05b021..e03849b0de2 100644 --- a/yarn-project/noir-compiler/package.json +++ b/yarn-project/noir-compiler/package.json @@ -45,6 +45,7 @@ } }, "dependencies": { + "@aztec/circuits.js": "workspace:^", "@aztec/foundation": "workspace:^", "@ltd/j-toml": "^1.38.0", "@noir-lang/noir_wasm": "portal:../../noir/packages/noir_wasm", diff --git a/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap b/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap index 1c00e7c76f5..b9e79a46077 100644 --- a/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap +++ b/yarn-project/noir-compiler/src/__snapshots__/index.test.ts.snap @@ -47,7 +47,7 @@ exports[`noir-compiler using nargo generates Aztec.nr external interface 1`] = ` use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; @@ -240,7 +240,7 @@ exports[`noir-compiler using wasm binary generates Aztec.nr external interface 1 use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; diff --git a/yarn-project/noir-compiler/src/cli.ts b/yarn-project/noir-compiler/src/cli.ts index 837975c8075..7416abed4cc 100644 --- a/yarn-project/noir-compiler/src/cli.ts +++ b/yarn-project/noir-compiler/src/cli.ts @@ -3,18 +3,14 @@ import { createConsoleLogger } from '@aztec/foundation/log'; import { Command } from 'commander'; -import { compileNoir } from './cli/compileNoir.js'; -import { generateNoirInterface } from './cli/noir-interface.js'; -import { generateTypescriptInterface } from './cli/typescript.js'; +import { addNoirCompilerCommanderActions } from 
'./cli/add_noir_compiler_commander_actions.js'; const program = new Command(); const log = createConsoleLogger('aztec:compiler-cli'); const main = async () => { program.name('aztec-compile'); - compileNoir(program, 'compile', log); - generateTypescriptInterface(program, 'typescript', log); - generateNoirInterface(program, 'interface', log); + addNoirCompilerCommanderActions(program, log); await program.parseAsync(process.argv); }; diff --git a/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts new file mode 100644 index 00000000000..f651eecd996 --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/add_noir_compiler_commander_actions.ts @@ -0,0 +1,67 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { Command } from 'commander'; + +/** + * CLI options for configuring behavior + */ +interface Options { + // eslint-disable-next-line jsdoc/require-jsdoc + outdir: string; + // eslint-disable-next-line jsdoc/require-jsdoc + typescript: string | undefined; + // eslint-disable-next-line jsdoc/require-jsdoc + interface: string | undefined; + // eslint-disable-next-line jsdoc/require-jsdoc + compiler: string | undefined; +} + +/** + * Registers the compile, generate-typescript and generate-noir-interface commands on the given commander program. + * @param program - Commander program. + * @param log - Optional logging function. + */ +export function addNoirCompilerCommanderActions(program: Command, log: LogFn = () => {}) { + program + .command('compile') + .argument('', 'Path to the bin or Aztec.nr project to compile') + .option('-o, --outdir ', 'Output folder for the binary artifacts, relative to the project path', 'target') + .option('-ts, --typescript ', 'Optional output folder for generating typescript wrappers', undefined) + .option('-i, --interface ', 'Optional output folder for generating an Aztec.nr contract interface', undefined) + .option('-c --compiler ', 'Which compiler to use. Either nargo or wasm. Defaults to wasm', 'wasm') + .description('Compiles the Noir source in the target project') + + .action(async (projectPath: string, options: Options) => { + const { compileNoir } = await import('./compile_noir.js'); + await compileNoir(projectPath, options, log); + });
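For reference, since the action above just forwards to the extracted helper, the same compilation can be driven programmatically. A minimal sketch, assuming an ESM context; the project path and option values are illustrative, and compileNoir's new signature is the one introduced in compile_noir.ts further below:

import { createConsoleLogger } from '@aztec/foundation/log';
import { compileNoir } from './compile_noir.js';

// Compile with the wasm backend, also emitting typescript wrappers into ./types.
await compileNoir(
  './my-project', // illustrative path
  { outdir: 'target', typescript: 'types', interface: undefined, compiler: 'wasm' },
  createConsoleLogger('aztec:compiler-cli'),
);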
+ + program + .command('generate-typescript') + .argument('', 'Path to the noir project') + .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') + .option( + '-o, --outdir ', + 'Output folder for the generated typescript wrappers, relative to the project path', + 'types', + ) + .description('Generates typescript interfaces from the artifacts in the given project') + + .action(async (projectPath: string, options) => { + const { generateTypescriptInterface } = await import('./generate_typescript_interface.js'); + generateTypescriptInterface(projectPath, options, log); + }); + + return program + .command('generate-noir-interface') + .argument('', 'Path to the noir project') + .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') + .option( + '-o, --outdir ', + 'Output folder for the generated noir interfaces, relative to the project path', + 'interfaces', + ) + .description('Generates Noir interfaces from the artifacts in the given project') + .action(async (projectPath: string, options) => { + const { generateNoirInterface } = await import('./generate_noir_interface.js'); + generateNoirInterface(projectPath, options, log); + }); +} diff --git a/yarn-project/noir-compiler/src/cli/compileNoir.ts b/yarn-project/noir-compiler/src/cli/compile_noir.ts similarity index 75% rename from yarn-project/noir-compiler/src/cli/compileNoir.ts rename to yarn-project/noir-compiler/src/cli/compile_noir.ts index 7e00fed39fa..f87055464a9 100644 --- a/yarn-project/noir-compiler/src/cli/compileNoir.ts +++ b/yarn-project/noir-compiler/src/cli/compile_noir.ts @@ -1,7 +1,6 @@ import { ContractArtifact } from '@aztec/foundation/abi'; import { LogFn } from '@aztec/foundation/log'; -import { Command } from 'commander'; import { mkdirSync, writeFileSync } from 'fs'; import { mkdirpSync } from 'fs-extra'; import path, { resolve } from 'path'; @@ -34,32 +33,21 @@ interface Options { * @param log - Optional logging function. - * @returns The program with the command registered. */ -export function compileNoir(program: Command, name = 'compile', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the bin or Aztec.nr project to compile') - .option('-o, --outdir ', 'Output folder for the binary artifacts, relative to the project path', 'target') - .option('-ts, --typescript ', 'Optional output folder for generating typescript wrappers', undefined) - .option('-i, --interface ', 'Optional output folder for generating an Aztec.nr contract interface', undefined) - .option('-c --compiler ', 'Which compiler to use. Either nargo or wasm.
Defaults to nargo', 'wasm') - .description('Compiles the Noir Source in the target project') - - .action(async (projectPath: string, options: Options) => { - const { compiler } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - if (compiler !== 'nargo' && compiler !== 'wasm') { - throw new Error(`Invalid compiler: ${compiler}`); - } +export async function compileNoir(projectPath: string, options: Options, log: LogFn = () => {}) { + const { compiler } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + if (compiler !== 'nargo' && compiler !== 'wasm') { + throw new Error(`Invalid compiler: ${compiler}`); + } - const compile = compiler === 'wasm' ? compileUsingNoirWasm : compileUsingNargo; - log(`Compiling ${projectPath} with ${compiler} backend...`); - const results = await compile(projectPath, { log }); - for (const result of results) { - generateOutput(projectPath, result, options, log); - } - }); + const compile = compiler === 'wasm' ? compileUsingNoirWasm : compileUsingNargo; + log(`Compiling ${projectPath} with ${compiler} backend...`); + const results = await compile(projectPath, { log }); + for (const result of results) { + generateOutput(projectPath, result, options, log); + } } /** diff --git a/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts b/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts new file mode 100644 index 00000000000..4a13a1c756f --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/generate_noir_interface.ts @@ -0,0 +1,48 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; + +import { mkdirpSync } from 'fs-extra'; +import path, { resolve } from 'path'; + +import { generateNoirContractInterface } from '../index.js'; +import { isContractArtifact } from '../utils.js'; + +/** + * Generates a Noir external interface file for each contract artifact in the given project's artifacts folder. + */ +export function generateNoirInterface( + projectPath: string, + options: { + // eslint-disable-next-line jsdoc/require-jsdoc + outdir: string; + // eslint-disable-next-line jsdoc/require-jsdoc + artifacts: string; + }, + log: LogFn, +) { + const { outdir, artifacts } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + const currentDir = process.cwd(); + + const artifactsDir = resolve(projectPath, artifacts); + for (const artifactsDirItem of readdirSync(artifactsDir)) { + const artifactPath = resolve(artifactsDir, artifactsDirItem); + if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { + const contract = JSON.parse(readFileSync(artifactPath).toString()); + if (!isContractArtifact(contract)) { + continue; + } + const interfacePath = resolve(projectPath, outdir, `${contract.name}_interface.nr`); + log(`Writing ${contract.name} Noir external interface to ${path.relative(currentDir, interfacePath)}`); + try { + const noirInterface = generateNoirContractInterface(contract); + mkdirpSync(path.dirname(interfacePath)); + writeFileSync(interfacePath, noirInterface); + } catch (err) { + log(`Error generating interface for ${artifactPath}: ${err}`); + } + } + } +}
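The generator below inherits a relative-import edge case from the old typescript.ts. A minimal sketch of the underlying Node behavior, with illustrative paths:

import path from 'path';

// path.relative() between a wrapper and a sibling artifact yields a bare
// 'Foo.json', which is not a valid ESM relative import specifier, so the
// generator prepends './' in exactly this case.
const tsPath = '/project/types/Foo.ts'; // illustrative paths
const artifactPath = '/project/types/Foo.json';
let specifier = path.relative(path.dirname(tsPath), artifactPath); // 'Foo.json'
if (!specifier.startsWith('.')) {
  specifier = `./${specifier}`;
}
console.log(specifier); // './Foo.json'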
diff --git a/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts b/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts new file mode 100644 index 00000000000..d004706c257 --- /dev/null +++ b/yarn-project/noir-compiler/src/cli/generate_typescript_interface.ts @@ -0,0 +1,57 @@ +import { LogFn } from '@aztec/foundation/log'; + +import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; +import { mkdirpSync } from 'fs-extra'; +import path, { resolve } from 'path'; + +import { generateTypescriptContractInterface } from '../index.js'; +import { isContractArtifact } from '../utils.js'; + +/** + * Generates a typescript interface for each contract artifact in the given project's artifacts folder. + * @param projectPath - Path to the noir project. + * @param options - Artifacts and output folders. + * @param log - Logging function. + */ +export function generateTypescriptInterface( + projectPath: string, + options: { + /* eslint-disable jsdoc/require-jsdoc */ + outdir: string; + artifacts: string; + /* eslint-enable jsdoc/require-jsdoc */ + }, + log: LogFn, +) { + const { outdir, artifacts } = options; + if (typeof projectPath !== 'string') { + throw new Error(`Missing project path argument`); + } + const currentDir = process.cwd(); + + const artifactsDir = resolve(projectPath, artifacts); + for (const artifactsDirItem of readdirSync(artifactsDir)) { + const artifactPath = resolve(artifactsDir, artifactsDirItem); + if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { + const contract = JSON.parse(readFileSync(artifactPath).toString()); + if (!isContractArtifact(contract)) { + continue; + } + const tsPath = resolve(projectPath, outdir, `${contract.name}.ts`); + log(`Writing ${contract.name} typescript interface to ${path.relative(currentDir, tsPath)}`); + let relativeArtifactPath = path.relative(path.dirname(tsPath), artifactPath); + if (relativeArtifactPath === `${contract.name}.json`) { + // relative path edge case, prepending ./ for local import - the above logic just does + // `${contract.name}.json`, which is not a valid import for a file in the same directory + relativeArtifactPath = `./${contract.name}.json`; + } + try { + const tsWrapper = generateTypescriptContractInterface(contract, relativeArtifactPath); + mkdirpSync(path.dirname(tsPath)); + writeFileSync(tsPath, tsWrapper); + } catch (err) { + log(`Error generating interface for ${artifactPath}: ${err}`); + } + } + } +} diff --git a/yarn-project/noir-compiler/src/cli/index.ts b/yarn-project/noir-compiler/src/cli/index.ts index 0687e01706f..df1ae3d0880 100644 --- a/yarn-project/noir-compiler/src/cli/index.ts +++ b/yarn-project/noir-compiler/src/cli/index.ts @@ -1,3 +1 @@ -export { compileNoir } from './compileNoir.js'; -export { generateNoirInterface } from './noir-interface.js'; -export { generateTypescriptInterface } from './typescript.js'; +export { addNoirCompilerCommanderActions } from './add_noir_compiler_commander_actions.js'; diff --git a/yarn-project/noir-compiler/src/cli/noir-interface.ts b/yarn-project/noir-compiler/src/cli/noir-interface.ts deleted file mode 100644 index 54db5b7b86b..00000000000 --- a/yarn-project/noir-compiler/src/cli/noir-interface.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { LogFn } from '@aztec/foundation/log'; - -import { Command } from 'commander'; -import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; -import { mkdirpSync } from 'fs-extra'; -import path, { resolve } from 'path'; - -import { generateNoirContractInterface } from '../index.js'; -import { isContractArtifact } from '../utils.js'; - -/** - * Registers a 'interface' command on the given commander program that generates a Noir interface out of an ABI. - * @param program - Commander program. - * @param log - Optional logging function.
- * @returns The program with the command registered. - */ -export function generateNoirInterface(program: Command, name = 'interface', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the noir project') - .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') - .option( - '-o, --outdir ', - 'Output folder for the generated noir interfaces, relative to the project path', - 'interfaces', - ) - .description('Generates Noir interfaces from the artifacts in the given project') - - .action( - ( - projectPath: string, - /* eslint-disable jsdoc/require-jsdoc */ - options: { - outdir: string; - artifacts: string; - }, - /* eslint-enable jsdoc/require-jsdoc */ - ) => { - const { outdir, artifacts } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - const currentDir = process.cwd(); - - const artifactsDir = resolve(projectPath, artifacts); - for (const artifactsDirItem of readdirSync(artifactsDir)) { - const artifactPath = resolve(artifactsDir, artifactsDirItem); - if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { - const contract = JSON.parse(readFileSync(artifactPath).toString()); - if (!isContractArtifact(contract)) { - continue; - } - const interfacePath = resolve(projectPath, outdir, `${contract.name}_interface.nr`); - log(`Writing ${contract.name} Noir external interface to ${path.relative(currentDir, interfacePath)}`); - try { - const noirInterface = generateNoirContractInterface(contract); - mkdirpSync(path.dirname(interfacePath)); - writeFileSync(interfacePath, noirInterface); - } catch (err) { - log(`Error generating interface for ${artifactPath}: ${err}`); - } - } - } - }, - ); -} diff --git a/yarn-project/noir-compiler/src/cli/typescript.ts b/yarn-project/noir-compiler/src/cli/typescript.ts deleted file mode 100644 index cf107cc0338..00000000000 --- a/yarn-project/noir-compiler/src/cli/typescript.ts +++ /dev/null @@ -1,72 +0,0 @@ -import { LogFn } from '@aztec/foundation/log'; - -import { Command } from 'commander'; -import { readFileSync, readdirSync, statSync, writeFileSync } from 'fs'; -import { mkdirpSync } from 'fs-extra'; -import path, { resolve } from 'path'; - -import { generateTypescriptContractInterface } from '../index.js'; -import { isContractArtifact } from '../utils.js'; - -/** - * Registers a 'typescript' command on the given commander program that generates typescript interface out of an ABI. - * @param program - Commander program. - * @param log - Optional logging function. - * @returns The program with the command registered. 
- */ -export function generateTypescriptInterface(program: Command, name = 'typescript', log: LogFn = () => {}): Command { - return program - .command(name) - .argument('', 'Path to the noir project') - .option('--artifacts ', 'Folder containing the compiled artifacts, relative to the project path', 'target') - .option( - '-o, --outdir ', - 'Output folder for the generated typescript wrappers, relative to the project path', - 'types', - ) - .description('Generates typescript interfaces from the artifacts in the given project') - - .action( - ( - projectPath: string, - /* eslint-disable jsdoc/require-jsdoc */ - options: { - outdir: string; - artifacts: string; - }, - /* eslint-enable jsdoc/require-jsdoc */ - ) => { - const { outdir, artifacts } = options; - if (typeof projectPath !== 'string') { - throw new Error(`Missing project path argument`); - } - const currentDir = process.cwd(); - - const artifactsDir = resolve(projectPath, artifacts); - for (const artifactsDirItem of readdirSync(artifactsDir)) { - const artifactPath = resolve(artifactsDir, artifactsDirItem); - if (statSync(artifactPath).isFile() && artifactPath.endsWith('.json')) { - const contract = JSON.parse(readFileSync(artifactPath).toString()); - if (!isContractArtifact(contract)) { - continue; - } - const tsPath = resolve(projectPath, outdir, `${contract.name}.ts`); - log(`Writing ${contract.name} typescript interface to ${path.relative(currentDir, tsPath)}`); - let relativeArtifactPath = path.relative(path.dirname(tsPath), artifactPath); - if (relativeArtifactPath === `${contract.name}.json`) { - // relative path edge case, prepending ./ for local import - the above logic just does - // `${contract.name}.json`, which is not a valid import for a file in the same directory - relativeArtifactPath = `./${contract.name}.json`; - } - try { - const tsWrapper = generateTypescriptContractInterface(contract, relativeArtifactPath); - mkdirpSync(path.dirname(tsPath)); - writeFileSync(tsPath, tsWrapper); - } catch (err) { - log(`Error generating interface for ${artifactPath}: ${err}`); - } - } - } - }, - ); -} diff --git a/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts b/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts index 356c891564d..8c9380b77ef 100644 --- a/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts +++ b/yarn-project/noir-compiler/src/compile/noir/dependencies/github-dependency-resolver.ts @@ -71,8 +71,11 @@ export class GithubDependencyResolver implements NoirDependencyResolver { async #extractZip(dependency: NoirGitDependencyConfig, archivePath: string): Promise { const gitUrl = new URL(dependency.git); + // extract the archive to this location const extractLocation = join('libs', safeFilename(gitUrl.pathname + '@' + (dependency.tag ?? 'HEAD'))); - const tmpExtractLocation = extractLocation + '.tmp'; + + // where we expect to find this package after extraction + // it might already exist if the archive got unzipped previously const packagePath = join(extractLocation, dependency.directory ?? 
''); if (this.#fm.hasFileSync(packagePath)) { @@ -82,24 +85,21 @@ export class GithubDependencyResolver implements NoirDependencyResolver { const { entries } = await unzip(this.#fm.readFileSync(archivePath)); + // extract to a temporary directory, then move it to the final location + // TODO empty the temp directory first + const tmpExtractLocation = extractLocation + '.tmp'; for (const entry of Object.values(entries)) { if (entry.isDirectory) { continue; } + // remove the first path segment, because it'll be the archive name const name = stripSegments(entry.name, 1); - if (dependency.directory && !name.startsWith(dependency.directory)) { - continue; - } const path = join(tmpExtractLocation, name); await this.#fm.writeFile(path, (await entry.blob()).stream()); } - if (dependency.directory) { - this.#fm.moveFileSync(join(tmpExtractLocation, dependency.directory), packagePath); - } else { - this.#fm.moveFileSync(tmpExtractLocation, packagePath); - } + this.#fm.moveFileSync(tmpExtractLocation, extractLocation); return packagePath; } diff --git a/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts b/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts index 36e2e24893b..720b7fe942f 100644 --- a/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts +++ b/yarn-project/noir-compiler/src/compile/noir/dependencies/local-dependency-resolver.ts @@ -1,6 +1,6 @@ import { NoirDependencyConfig } from '@aztec/foundation/noir'; -import { resolve } from 'path'; +import { isAbsolute, join } from 'path'; import { FileManager } from '../file-manager/file-manager.js'; import { NoirPackage } from '../package.js'; @@ -16,12 +16,14 @@ export class LocalDependencyResolver implements NoirDependencyResolver { this.#fm = fm; } - resolveDependency(pkg: NoirPackage, config: NoirDependencyConfig): Promise { + resolveDependency(parent: NoirPackage, config: NoirDependencyConfig): Promise { if ('path' in config) { + const parentPath = parent.getPackagePath(); + const dependencyPath = isAbsolute(config.path) ? config.path : join(parentPath, config.path); return Promise.resolve({ // unknown version, Nargo.toml doesn't have a version field version: undefined, - package: NoirPackage.open(resolve(pkg.getPackagePath(), config.path), this.#fm), + package: NoirPackage.open(dependencyPath, this.#fm), }); } else { return Promise.resolve(null); diff --git a/yarn-project/noir-compiler/src/contract-interface-gen/abi.ts b/yarn-project/noir-compiler/src/contract-interface-gen/abi.ts index 61b47ea24d6..df819ae6578 100644 --- a/yarn-project/noir-compiler/src/contract-interface-gen/abi.ts +++ b/yarn-project/noir-compiler/src/contract-interface-gen/abi.ts @@ -1,3 +1,4 @@ +import { FUNCTION_TREE_HEIGHT } from '@aztec/circuits.js'; import { ContractArtifact, DebugMetadata, FunctionArtifact, FunctionType } from '@aztec/foundation/abi'; import { deflate } from 'pako'; @@ -88,6 +89,9 @@ export function generateContractArtifact( { contract, debug }: NoirContractCompilationArtifacts, aztecNrVersion?: string, ): ContractArtifact { + if (contract.functions.length > 2 ** FUNCTION_TREE_HEIGHT) { + throw new Error(`Contract can only have a maximum of ${2 ** FUNCTION_TREE_HEIGHT} functions`); + } const originalFunctions = contract.functions; // TODO why sort? we should have idempotent compilation so this should not be needed. 
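// Context for the function-count guard added above: the function tree is a
// binary Merkle tree with 2 ** FUNCTION_TREE_HEIGHT leaves, one per contract
// function, so an artifact with more functions than that could never be
// committed to the tree; failing here surfaces the problem at
// artifact-generation time rather than at deployment.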
const sortedFunctions = [...contract.functions].sort((fnA, fnB) => fnA.name.localeCompare(fnB.name)); diff --git a/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts b/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts index ab91b78d8d6..ecc5ffb771d 100644 --- a/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts +++ b/yarn-project/noir-compiler/src/contract-interface-gen/noir.ts @@ -167,7 +167,7 @@ ${callStatement} function generateStaticImports() { return `use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH;`; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH;`; } /** diff --git a/yarn-project/noir-compiler/tsconfig.json b/yarn-project/noir-compiler/tsconfig.json index a2d81aa678d..a6b3ad94790 100644 --- a/yarn-project/noir-compiler/tsconfig.json +++ b/yarn-project/noir-compiler/tsconfig.json @@ -6,6 +6,9 @@ "tsBuildInfoFile": ".tsbuildinfo" }, "references": [ + { + "path": "../circuits.js" + }, { "path": "../foundation" } diff --git a/yarn-project/noir-contracts/scripts/compile.sh b/yarn-project/noir-contracts/scripts/compile.sh index fe2096f4cf4..bc217a6156b 100755 --- a/yarn-project/noir-contracts/scripts/compile.sh +++ b/yarn-project/noir-contracts/scripts/compile.sh @@ -19,4 +19,4 @@ build() { export -f build -# run 4 builds at a time -echo "$@" | xargs -n 1 -P 4 bash -c 'build "$0"' +# run builds in parallel, one per core +echo "$@" | xargs -n 1 -P $(nproc) bash -c 'build "$0"' diff --git a/yarn-project/noir-contracts/scripts/types.sh b/yarn-project/noir-contracts/scripts/types.sh index 2747a38ef9b..ebbc24bdc5f 100755 --- a/yarn-project/noir-contracts/scripts/types.sh +++ b/yarn-project/noir-contracts/scripts/types.sh @@ -1,4 +1,3 @@ - #!/bin/bash # Example: @@ -8,7 +7,7 @@ # Enable strict mode: # Exit on error (set -e), treat unset variables as an error (set -u), -set -eu; +set -eu artifacts_dir="src/artifacts" types_dir="src/types" @@ -17,42 +16,40 @@ types_dir="src/types" mkdir -p $types_dir mkdir -p $artifacts_dir - ROOT=$(pwd) write_import() { - CONTRACT_NAME=$1 - NAME=$(echo $CONTRACT_NAME | perl -pe 's/(^|_)(\w)/\U$2/g') + CONTRACT_NAME=$1 + NAME=$(echo $CONTRACT_NAME | perl -pe 's/(^|_)(\w)/\U$2/g') - echo "import ${NAME}Json from './${CONTRACT_NAME}_contract.json' assert { type: 'json' };" >> "$artifacts_dir/index.ts"; + echo "import ${NAME}Json from './${CONTRACT_NAME}_contract.json' assert { type: 'json' };" >>"$artifacts_dir/index.ts" } write_export() { - CONTRACT_NAME=$1 - NAME=$(echo $CONTRACT_NAME | perl -pe 's/(^|_)(\w)/\U$2/g') + CONTRACT_NAME=$1 + NAME=$(echo $CONTRACT_NAME | perl -pe 's/(^|_)(\w)/\U$2/g') - # artifacts - echo "export const ${NAME}ContractArtifact = ${NAME}Json as ContractArtifact;" >> "$artifacts_dir/index.ts"; - echo "Written typescript for $NAME" + # artifacts + echo "export const ${NAME}ContractArtifact = ${NAME}Json as ContractArtifact;" >>"$artifacts_dir/index.ts" + echo "Written typescript for $NAME" - # types - echo "export * from './${CONTRACT_NAME}.js';" >> "$types_dir/index.ts"; + # types + echo "export * from './${CONTRACT_NAME}.js';" >>"$types_dir/index.ts" } - process() { CONTRACT=$1 cd $ROOT - NODE_OPTIONS=--no-warnings yarn ts-node --esm src/scripts/copy_source.ts $CONTRACT_NAME + node --no-warnings --loader ts-node/esm src/scripts/copy_source.ts $CONTRACT_NAME echo "Creating types for $CONTRACT" - NODE_OPTIONS=--no-warnings yarn ts-node --esm src/scripts/copy_output.ts + node --no-warnings --loader ts-node/esm src/scripts/copy_output.ts
$CONTRACT_NAME } -format(){ +format() { echo "Formatting contract folders" - yarn run -T prettier -w ../aztec.js/src/artifacts/*.json ./$types_dir/*.ts + yarn run -T prettier -w ../aztec.js/src/artifacts/*.json ./$types_dir/*.ts echo -e "Done\n" } @@ -69,14 +66,14 @@ wait rm -f $artifacts_dir/index.ts || true # Generate artifacts package index.ts -echo "// Auto generated module\n" > "$artifacts_dir/index.ts"; -echo "import { ContractArtifact } from '@aztec/foundation/abi';" >> "$artifacts_dir/index.ts"; +echo "// Auto generated module\n" >"$artifacts_dir/index.ts" +echo "import { ContractArtifact } from '@aztec/foundation/abi';" >>"$artifacts_dir/index.ts" # Generate types package index.ts -echo "// Auto generated module\n" > "$types_dir/index.ts"; +echo "// Auto generated module\n" >"$types_dir/index.ts" for CONTRACT_NAME in "$@"; do - write_import $CONTRACT_NAME - write_export $CONTRACT_NAME + write_import $CONTRACT_NAME + write_export $CONTRACT_NAME done # only run the rest when the full flag is set diff --git a/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml index 03bfe065166..32e7ab21040 100644 --- a/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/card_game_contract/Nargo.toml @@ -7,3 +7,4 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } value_note = { path = "../../../../aztec-nr/value-note"} +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } diff --git a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr index f5f5636132b..560dc4fb8e9 100644 --- a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr +++ b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/cards.nr @@ -1,6 +1,6 @@ +use dep::protocol_types::constants::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; use dep::aztec::{ context::{PrivateContext, PublicContext, Context}, - constants_gen::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}, note::{ note_getter_options::NoteGetterOptions, note_viewer_options::NoteViewerOptions, diff --git a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr index 2792859e9b4..9edea67a00c 100644 --- a/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/card_game_contract/src/main.nr @@ -2,6 +2,7 @@ mod cards; mod game; contract CardGame { + use dep::protocol_types::constants::MAX_NOTES_PER_PAGE; use dep::aztec::{ context::Context, hash::pedersen_hash, @@ -22,7 +23,6 @@ contract CardGame { use dep::aztec::{ abi, - constants_gen::{MAX_NOTES_PER_PAGE}, abi::{ Hasher, PrivateContextInputs, }, diff --git a/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml index 4fa0ca7d4e2..5eccd21c042 100644 --- a/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/docs_example_contract/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "contract" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } \ No newline at end of file +aztec = { path = "../../../../aztec-nr/aztec" } +protocol_types = { 
path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr index 474fb20c6fc..9e3234812f9 100644 --- a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr +++ b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/actions.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; +use dep::protocol_types::constants::{MAX_NOTES_PER_PAGE, MAX_READ_REQUESTS_PER_CALL}; use dep::aztec::note::{ note_getter_options::NoteGetterOptions, note_viewer_options::NoteViewerOptions, }; diff --git a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr index c225942b9fb..6ac7442ea2f 100644 --- a/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr +++ b/yarn-project/noir-contracts/src/contracts/docs_example_contract/src/options.nr @@ -1,5 +1,5 @@ use crate::types::card_note::{CardNote, CARD_NOTE_LEN}; -use dep::aztec::constants_gen::MAX_READ_REQUESTS_PER_CALL; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::note::note_getter_options::{NoteGetterOptions, Sort, SortOrder}; use dep::std::option::Option; diff --git a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml index 21ef226c194..2e29971c5dc 100644 --- a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "contract" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } \ No newline at end of file +aztec = { path = "../../../../aztec-nr/aztec" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr index 8e1de0cf189..f428fc88426 100644 --- a/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/easy_private_voting_contract/src/main.nr @@ -1,5 +1,6 @@ contract EasyPrivateVoting { // docs:start:imports + use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; use dep::aztec::{ context::{PrivateContext, Context}, oracle::get_secret_key::get_secret_key, // used to compute nullifier @@ -11,7 +12,6 @@ contract EasyPrivateVoting { field_serialization::{ FieldSerializationMethods, FIELD_SERIALIZED_LEN}, }, types::address::{AztecAddress}, - constants_gen::EMPTY_NULLIFIED_COMMITMENT, }; // docs:end:imports // docs:start:storage_struct diff --git a/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml index 7f5a66f3819..9ef152ea1f6 100644 --- a/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/import_test_contract/Nargo.toml @@ -5,4 +5,5 @@ compiler_version = ">=0.18.0" type = "contract" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" 
} \ No newline at end of file +aztec = { path = "../../../../aztec-nr/aztec" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml index b11305196cd..3f884034998 100644 --- a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/Nargo.toml @@ -6,4 +6,5 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } -value_note = { path = "../../../../aztec-nr/value-note" } \ No newline at end of file +value_note = { path = "../../../../aztec-nr/value-note" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr index 462f95091be..d9235ccd6ee 100644 --- a/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/inclusion_proofs_contract/src/main.nr @@ -2,6 +2,11 @@ mod utils; // A demonstration of inclusion and non-inclusion proofs. contract InclusionProofs { + use dep::protocol_types::constants::{ + NOTE_HASH_TREE_HEIGHT, + PUBLIC_DATA_TREE_HEIGHT, + GENERATOR_INDEX__PUBLIC_LEAF_INDEX, + }; use dep::std::merkle::compute_merkle_root; use dep::aztec::{ state_vars::{ @@ -20,13 +25,8 @@ contract InclusionProofs { note_header::NoteHeader, utils as note_utils, }, - constants_gen::{ - NOTE_HASH_TREE_HEIGHT, - PUBLIC_DATA_TREE_HEIGHT, - GENERATOR_INDEX__PUBLIC_LEAF_INDEX, - }, + oracle::{ - get_block_header::get_block_header, get_membership_witness::{ get_membership_witness, MembershipWitness, @@ -104,6 +104,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. @@ -147,6 +148,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. 
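The hunk below widens the low-nullifier check to also accept the case where the to-be-proven nullifier is larger than every value in the tree. A minimal TypeScript sketch of the predicate being enforced, assuming the leaf_data field names from the witness above and the convention that the maximum leaf has next_index == 0:

// Non-inclusion in an indexed Merkle tree is shown via a "low leaf": the
// greatest leaf whose value is below x. x is absent iff that leaf points
// past x, or is the maximum leaf (by convention next_index == 0).
interface LeafData { value: bigint; nextValue: bigint; nextIndex: bigint; }

function provesNonInclusion(lowLeaf: LeafData, x: bigint): boolean {
  const lowLeafIsMax = lowLeaf.nextIndex === 0n;
  return lowLeaf.value < x && (lowLeaf.nextValue > x || lowLeafIsMax);
}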
@@ -185,9 +187,9 @@ contract InclusionProofs { ); // 5.c) Prove that the low nullifier is pointing "over" the nullifier to prove that the nullifier is not - // included in the nullifier tree + // included in the nullifier tree (or to 0 if the to-be-inserted nullifier is the largest of all) assert( - full_field_greater_than(witness.leaf_data.next_value, nullifier), + full_field_greater_than(witness.leaf_data.next_value, nullifier) | (witness.leaf_data.next_index == 0), "Proving nullifier non-inclusion failed: low_nullifier.next_value > nullifier.value check failed" ); @@ -218,6 +220,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. @@ -250,6 +253,7 @@ contract InclusionProofs { // TODO: assert that block number is less than the block number of context.block_header // --> This will either require a new oracle method that returns block_header.global_variables_hash preimage // or modifying the private context so that we somehow expose it. + // Blocked by #3564 // 1) Get block header from oracle and ensure that the block hash is included in the current blocks tree // root. diff --git a/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml index 33b61c1b24d..ef3e9d65340 100644 --- a/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/lending_contract/Nargo.toml @@ -6,4 +6,5 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } -safe_math = { path = "../../../../aztec-nr/safe-math" } \ No newline at end of file +safe_math = { path = "../../../../aztec-nr/safe-math" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr b/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr index 53385ed3e99..6b2cfa5c629 100644 --- a/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr +++ b/yarn-project/noir-contracts/src/contracts/lending_contract/src/interfaces.nr @@ -4,7 +4,7 @@ use dep::aztec::context::{ }; use crate::asset::Asset; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; use dep::aztec::selector::compute_selector; struct PriceFeed { diff --git a/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml index 319c15b9ad4..cc97a1ddeee 100644 --- a/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/test_contract/Nargo.toml @@ -8,3 +8,4 @@ type = "contract" aztec = { path = "../../../../aztec-nr/aztec" } field_note = { path = "../../../../aztec-nr/field-note" } token_portal_content_hash_lib = { path = "../token_portal_content_hash_lib" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr 
b/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr index 1bb62e9f3f6..51bd95002ae 100644 --- a/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr +++ b/yarn-project/noir-contracts/src/contracts/test_contract/src/interface.nr @@ -2,7 +2,7 @@ use dep::std; use dep::aztec::context::{ PrivateContext, PublicContext }; -use dep::aztec::constants_gen::RETURN_VALUES_LENGTH; +use dep::protocol_types::constants::RETURN_VALUES_LENGTH; struct AStructTestCodeGenStruct { amount: Field, diff --git a/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr b/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr index 186b9220cfb..1515b39d8a1 100644 --- a/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr +++ b/yarn-project/noir-contracts/src/contracts/test_contract/src/main.nr @@ -1,7 +1,7 @@ // A contract used for testing a random hodgepodge of small features from simulator and end-to-end tests. contract Test { use dep::std::option::Option; - + use dep::protocol_types::constants::EMPTY_NULLIFIED_COMMITMENT; // docs:start:unencrypted_import use dep::aztec::log::emit_unencrypted_log; // docs:end:unencrypted_import @@ -24,7 +24,6 @@ contract Test { state_vars::immutable_singleton::ImmutableSingleton, log::emit_unencrypted_log_from_private, types::vec::BoundedVec, - constants_gen::EMPTY_NULLIFIED_COMMITMENT, }; use dep::token_portal_content_hash_lib::{get_mint_private_content_hash, get_mint_public_content_hash}; use dep::field_note::field_note::{FieldNote, FieldNoteMethods, FIELD_NOTE_LEN}; diff --git a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml index 25ad4026709..15cff362a4b 100644 --- a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/Nargo.toml @@ -8,4 +8,5 @@ type = "contract" aztec = { path = "../../../../aztec-nr/aztec" } safe_math = { path = "../../../../aztec-nr/safe-math" } field_note = { path = "../../../../aztec-nr/field-note" } -authwit = { path = "../../../../aztec-nr/authwit" } \ No newline at end of file +authwit = { path = "../../../../aztec-nr/authwit" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr index 5c3da5897b6..ae54a03ea76 100644 --- a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr +++ b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/balance_set.nr @@ -1,8 +1,8 @@ use dep::std::option::Option; use dep::safe_math::SafeU120; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ context::Context, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, types::address::AztecAddress, }; diff --git a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr index 978676cc22e..55ef7ff36e9 100644 --- a/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr +++ b/yarn-project/noir-contracts/src/contracts/token_blacklist_contract/src/types/token_note.nr @@ -1,3 +1,4 
@@ +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ note::{ note_header::NoteHeader, @@ -5,7 +6,6 @@ use dep::aztec::{ utils::compute_note_hash_for_read_or_nullify, }, context::PrivateContext, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, log::emit_encrypted_log, hash::pedersen_hash, diff --git a/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml b/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml index 13ddd27395c..4fc1814f588 100644 --- a/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml +++ b/yarn-project/noir-contracts/src/contracts/token_contract/Nargo.toml @@ -7,4 +7,5 @@ type = "contract" [dependencies] aztec = { path = "../../../../aztec-nr/aztec" } safe_math = { path = "../../../../aztec-nr/safe-math" } -authwit = { path = "../../../../aztec-nr/authwit" } \ No newline at end of file +authwit = { path = "../../../../aztec-nr/authwit" } +protocol_types = { path = "../../../../noir-protocol-circuits/src/crates/types" } \ No newline at end of file diff --git a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr index 5c3da5897b6..ae54a03ea76 100644 --- a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr +++ b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/balance_set.nr @@ -1,8 +1,8 @@ use dep::std::option::Option; use dep::safe_math::SafeU120; +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ context::Context, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, types::address::AztecAddress, }; diff --git a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr index cc366cc2277..8c4f1721cf9 100644 --- a/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr +++ b/yarn-project/noir-contracts/src/contracts/token_contract/src/types/token_note.nr @@ -1,3 +1,4 @@ +use dep::protocol_types::constants::MAX_READ_REQUESTS_PER_CALL; use dep::aztec::{ note::{ note_header::NoteHeader, @@ -5,7 +6,6 @@ use dep::aztec::{ utils::compute_note_hash_for_read_or_nullify, }, context::PrivateContext, - constants_gen::MAX_READ_REQUESTS_PER_CALL, state_vars::set::Set, log::emit_encrypted_log, hash::pedersen_hash, diff --git a/yarn-project/noir-protocol-circuits/package.json b/yarn-project/noir-protocol-circuits/package.json index 2b4b8351e53..f5eccb835a5 100644 --- a/yarn-project/noir-protocol-circuits/package.json +++ b/yarn-project/noir-protocol-circuits/package.json @@ -12,7 +12,7 @@ "clean": "rm -rf ./dest .tsbuildinfo", "formatting": "run -T prettier --check ./src && run -T eslint ./src", "formatting:fix": "run -T eslint --fix ./src && run -T prettier -w ./src", - "noir:build": "cd src && ../../../noir/target/release/nargo compile && rm -rf ./target/debug_*", + "noir:build": "cd src && ../../../noir/target/release/nargo compile --silence-warnings && rm -rf ./target/debug_*", "noir:types": "yarn ts-node --esm src/scripts/generate_ts_from_abi.ts && yarn formatting:fix", "noir:test": "cd src && ../../../noir/target/release/nargo test", "test": "NODE_NO_WARNINGS=1 node --experimental-vm-modules $(yarn bin jest) --passWithNoTests" diff --git a/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap 
b/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap index 4cf926dc96a..850acabc9de 100644 --- a/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap +++ b/yarn-project/noir-protocol-circuits/src/__snapshots__/index.test.ts.snap @@ -104,7 +104,7 @@ exports[`Private kernel Executes private kernel init circuit for a contract depl KernelCircuitPublicInputs { "constants": CombinedConstantData { "blockHeader": BlockHeader { - "blocksTreeRoot": Fr { + "archiveRoot": Fr { "asBigInt": 10561895175368852737061915973188839857007468377789560793687187642867659280638n, "asBuffer": { "data": [ @@ -25720,7 +25720,7 @@ exports[`Private kernel Executes private kernel inner for a nested call 1`] = ` KernelCircuitPublicInputs { "constants": CombinedConstantData { "blockHeader": BlockHeader { - "blocksTreeRoot": Fr { + "archiveRoot": Fr { "asBigInt": 4141256197271035428567950264296887925803599654022881395228888440470800002298n, "asBuffer": { "data": [ @@ -51336,7 +51336,7 @@ exports[`Private kernel Executes private kernel ordering after a deployment 1`] KernelCircuitPublicInputsFinal { "constants": CombinedConstantData { "blockHeader": BlockHeader { - "blocksTreeRoot": Fr { + "archiveRoot": Fr { "asBigInt": 10561895175368852737061915973188839857007468377789560793687187642867659280638n, "asBuffer": { "data": [ diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml index 47c3f8af754..4132c7e2b18 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/Nargo.toml @@ -5,5 +5,4 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } types = { path = "../types" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr index 8653d993183..c49b4843d22 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/common.nr @@ -1,11 +1,3 @@ -use dep::aztec::constants_gen::{ - EMPTY_NULLIFIED_COMMITMENT, - MAX_NEW_NULLIFIERS_PER_CALL, - MAX_NEW_L2_TO_L1_MSGS_PER_CALL, - MAX_NEW_COMMITMENTS_PER_CALL, - MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, - MAX_READ_REQUESTS_PER_CALL, -}; use dep::types::{ abis::{ call_request::CallRequest, @@ -21,6 +13,14 @@ use dep::types::{ }, address::{Address, EthAddress}, contrakt::deployment_data::ContractDeploymentData, + constants::{ + EMPTY_NULLIFIED_COMMITMENT, + MAX_NEW_NULLIFIERS_PER_CALL, + MAX_NEW_L2_TO_L1_MSGS_PER_CALL, + MAX_NEW_COMMITMENTS_PER_CALL, + MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, + MAX_READ_REQUESTS_PER_CALL, + }, hash::{ compute_constructor_hash, compute_l2_to_l1_hash, diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr index 88a34b4049c..b20c6b963b0 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_init.nr @@ -1,5 +1,4 @@ use crate::common; -use dep::aztec::constants_gen::EMPTY_NULLIFIED_COMMITMENT; use dep::std::unsafe; use dep::types::{ abis::{ @@ 
-7,6 +6,7 @@ use dep::types::{ private_kernel::private_call_data::PrivateCallData, kernel_circuit_public_inputs::{KernelCircuitPublicInputs, KernelCircuitPublicInputsBuilder}, }, + constants::EMPTY_NULLIFIED_COMMITMENT, mocked::{Proof, verify_previous_kernel_state}, transaction::request::TxRequest, utils::arrays::is_empty_array, @@ -117,7 +117,7 @@ impl PrivateKernelInputsInit { mod tests { use crate::private_kernel_init::PrivateKernelInputsInit; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_READ_REQUESTS_PER_CALL, }; use dep::types::{ diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr index cdee6d1b941..a89e4010bab 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_inner.nr @@ -81,7 +81,7 @@ impl PrivateKernelInputsInner { mod tests { use crate::private_kernel_inner::PrivateKernelInputsInner; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ EMPTY_NULLIFIED_COMMITMENT, MAX_READ_REQUESTS_PER_CALL, MAX_NEW_COMMITMENTS_PER_TX, diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr index 057317729aa..0fe7d3283e9 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr @@ -1,10 +1,4 @@ use crate::common; -use dep::aztec::constants_gen::{ - EMPTY_NULLIFIED_COMMITMENT, - MAX_NEW_COMMITMENTS_PER_TX, - MAX_NEW_NULLIFIERS_PER_TX, - MAX_READ_REQUESTS_PER_TX, -}; use dep::std::unsafe; use dep::types::{ abis::{ @@ -15,6 +9,12 @@ use dep::types::{ KernelCircuitPublicInputsFinal, }, }, + constants::{ + EMPTY_NULLIFIED_COMMITMENT, + MAX_NEW_COMMITMENTS_PER_TX, + MAX_NEW_NULLIFIERS_PER_TX, + MAX_READ_REQUESTS_PER_TX, + }, hash::{ compute_commitment_nonce, compute_unique_siloed_commitment, @@ -160,7 +160,7 @@ impl PrivateKernelInputsOrdering { mod tests { use crate::private_kernel_ordering::PrivateKernelInputsOrdering; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_READ_REQUESTS_PER_TX, MAX_NEW_COMMITMENTS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml index 76525900176..669bac30a78 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/Nargo.toml @@ -5,5 +5,4 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } types = { path = "../types" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr index 8bd199879b5..a83cda75565 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr @@ -17,6 +17,16 @@ use dep::types::{ storage_read::StorageRead, storage_update_request::StorageUpdateRequest, }, + 
constants::{ + EMPTY_NULLIFIED_COMMITMENT, + MAX_NEW_L2_TO_L1_MSGS_PER_CALL, + MAX_NEW_COMMITMENTS_PER_CALL, + MAX_NEW_NULLIFIERS_PER_CALL, + MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, + MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, + MAX_PUBLIC_DATA_READS_PER_CALL, + MAX_READ_REQUESTS_PER_CALL, + }, hash::{silo_commitment, silo_nullifier, compute_l2_to_l1_hash, accumulate_sha256}, utils::{ arrays::{array_length, array_to_bounded_vec, is_empty_array, struct_array_length, struct_array_to_bounded_vec}, @@ -26,17 +36,6 @@ use dep::types::{ }; use crate::hash::{compute_public_data_tree_index, compute_public_data_tree_value}; -use dep::aztec::constants_gen::{ - EMPTY_NULLIFIED_COMMITMENT, - MAX_NEW_L2_TO_L1_MSGS_PER_CALL, - MAX_NEW_COMMITMENTS_PER_CALL, - MAX_NEW_NULLIFIERS_PER_CALL, - MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, - MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, - MAX_PUBLIC_DATA_READS_PER_CALL, - MAX_READ_REQUESTS_PER_CALL, -}; - // Validates inputs to the kernel circuit that are common to all invocation scenarios. pub fn validate_inputs(public_call: PublicCallData){ // Validates commons inputs for all type of kernel inputs diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr index 7a3cc9a5f0e..1769f9f87f2 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/hash.nr @@ -1,15 +1,13 @@ -use dep::types::address::Address; -use dep::aztec::{ - constants_gen, - constants_gen::{GENERATOR_INDEX__PUBLIC_LEAF_INDEX}, - hash::sha256_to_field, +use dep::types::{ + address::Address, + constants::GENERATOR_INDEX__PUBLIC_LEAF_INDEX, }; pub fn compute_public_data_tree_index(contract_address: Address, storage_slot: Field) -> Field { dep::std::hash::pedersen_hash_with_separator([ contract_address.to_field(), storage_slot - ], constants_gen::GENERATOR_INDEX__PUBLIC_LEAF_INDEX) + ], GENERATOR_INDEX__PUBLIC_LEAF_INDEX) } pub fn compute_public_data_tree_value(value: Field) -> Field { diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr index 6527e18c741..550f14151e3 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_private_previous.nr @@ -88,7 +88,7 @@ mod tests { bounded_vec::BoundedVec, }, }; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr index 46d4f485504..58c5b2ab399 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/public_kernel_public_previous.nr @@ -76,7 +76,7 @@ mod tests { arrays::array_eq, }, }; - use dep::aztec::constants_gen::{ + use dep::types::constants::{ MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, }; diff --git 
a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml index d131d25b72c..a82bcabc543 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous-simulated/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml index a1aebfa70a6..a6bf17d1903 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-private-previous/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml index 5eb41be2441..9afbdf6b774 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous-simulated/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml index 9d972e291cc..f4b1e9b3557 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-public-previous/Nargo.toml @@ -7,4 +7,3 @@ compiler_version = ">=0.18.0" [dependencies] types = { path = "../types" } public_kernel_lib = { path = "../public-kernel-lib" } -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml index 9a1b9b54fda..0a1dcaa0012 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/Nargo.toml @@ -5,5 +5,4 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } types = { path = "../types" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr index 54546a9e9fb..4594e4ea4c3 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr @@ -1,5 +1,5 @@ // TODO(Kev): This constant is also 
defined in private-kernel-lib -use dep::aztec::constants_gen::NUM_FIELDS_PER_SHA256; +use dep::types::constants::NUM_FIELDS_PER_SHA256; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::abis::constant_rollup_data::ConstantRollupData; use dep::types::mocked::AggregationObject; diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr index 410826c29c3..d784b16b5ff 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/constant_rollup_data.nr @@ -3,7 +3,7 @@ use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; struct ConstantRollupData { // The very latest roots as at the very beginning of the entire rollup: - start_blocks_tree_snapshot : AppendOnlyTreeSnapshot, + archive_snapshot : AppendOnlyTreeSnapshot, // TODO(Sean): Some members of this struct tbd private_kernel_vk_tree_root : Field, @@ -16,7 +16,7 @@ struct ConstantRollupData { impl ConstantRollupData { pub fn eq(self, other : ConstantRollupData) -> bool { - self.start_blocks_tree_snapshot.eq(other.start_blocks_tree_snapshot) & + self.archive_snapshot.eq(other.archive_snapshot) & self.global_variables.eq(other.global_variables) & (self.private_kernel_vk_tree_root == other.private_kernel_vk_tree_root) & (self.public_kernel_vk_tree_root == other.public_kernel_vk_tree_root) & diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr index ba521f5f196..71e88760209 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/global_variables.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use dep::types::constants::GENERATOR_INDEX__GLOBAL_VARIABLES; struct GlobalVariables { chain_id : Field, @@ -16,7 +16,7 @@ impl GlobalVariables { self.block_number, self.timestamp ], - constants_gen::GENERATOR_INDEX__GLOBAL_VARIABLES, + GENERATOR_INDEX__GLOBAL_VARIABLES, ) } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr index bd848be2463..db92815baa0 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/previous_rollup_data.nr @@ -1,6 +1,6 @@ use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs; use dep::types::abis::membership_witness::VKMembershipWitness; -use dep::aztec::constants_gen::ROLLUP_VK_TREE_HEIGHT; +use dep::types::constants::ROLLUP_VK_TREE_HEIGHT; use dep::types::mocked::{Proof, VerificationKey}; struct PreviousRollupData{ diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr index a453771ce15..79ef5693a1d 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/base/base_rollup_inputs.nr @@ -8,7 +8,7 @@ use dep::types::utils::uint256::U256; use 
dep::types::abis::public_data_update_request::PublicDataUpdateRequest; use dep::types::abis::public_data_read::PublicDataRead; use dep::types::mocked::{AggregationObject, Proof}; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH, NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH, @@ -33,7 +33,7 @@ use dep::aztec::constants_gen::{ }; use dep::types::abis::previous_kernel_data::PreviousKernelData; use dep::types::abis::membership_witness::{NullifierMembershipWitness, MembershipWitness}; -use dep::types::abis::membership_witness::BlocksTreeRootMembershipWitness; +use dep::types::abis::membership_witness::ArchiveRootMembershipWitness; struct BaseRollupInputs { kernel_data: [PreviousKernelData; KERNELS_PER_BASE_ROLLUP], @@ -41,7 +41,7 @@ struct BaseRollupInputs { start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot, start_contract_tree_snapshot: AppendOnlyTreeSnapshot, start_public_data_tree_root: Field, - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot, + archive_snapshot: AppendOnlyTreeSnapshot, sorted_new_nullifiers: [Field; MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP], sorted_new_nullifiers_indexes: [u32; MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP], @@ -56,7 +56,7 @@ struct BaseRollupInputs { new_public_data_update_requests_sibling_paths: [[Field; PUBLIC_DATA_TREE_HEIGHT]; MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_BASE_ROLLUP], new_public_data_reads_sibling_paths: [[Field; PUBLIC_DATA_TREE_HEIGHT]; MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP], - blocks_tree_root_membership_witnesses: [BlocksTreeRootMembershipWitness; KERNELS_PER_BASE_ROLLUP], + archive_root_membership_witnesses: [ArchiveRootMembershipWitness; KERNELS_PER_BASE_ROLLUP], constants: ConstantRollupData, } @@ -113,7 +113,7 @@ impl BaseRollupInputs { let calldata_hash = BaseRollupInputs::components_compute_kernel_calldata_hash(self.kernel_data); // Perform membership checks that the notes provided exist within the historical trees data - self.perform_blocks_tree_membership_checks(); + self.perform_archive_membership_checks(); let aggregation_object = self.aggregate_proofs(); @@ -375,24 +375,24 @@ impl BaseRollupInputs { // Check that the block header used by each kernel is a member of the blocks tree --> since the block header // contains roots of all the trees this is sufficient to verify that the tree roots used by kernels are correct - fn perform_blocks_tree_membership_checks(self) { + fn perform_archive_membership_checks(self) { // For each of the block header (their block hashes), we need to do an inclusion proof // against the blocks tree root from the beginning of a rollup provided in the rollup constants - let blocks_treee_root = self.constants.start_blocks_tree_snapshot.root; + let archive_root = self.constants.archive_snapshot.root; for i in 0..KERNELS_PER_BASE_ROLLUP { // Rebuild the block hash let block_header = self.kernel_data[i].public_inputs.constants.block_header; let previous_block_hash = block_header.block.hash(); - let previous_block_hash_witness = self.blocks_tree_root_membership_witnesses[i]; + let previous_block_hash_witness = self.archive_root_membership_witnesses[i]; // Now check that the previous block hash is in the blocks tree from the beginning of the rollup components::assert_check_membership( previous_block_hash, previous_block_hash_witness.leaf_index, previous_block_hash_witness.sibling_path, - blocks_treee_root + archive_root ); } } @@ -538,11 +538,11 @@ mod tests { tests::merkle_tree_utils::{NonEmptyMerkleTree, compute_zero_hashes}, components, }; 
- use dep::aztec::constants_gen::{ + use dep::types::constants::{ CONTRACT_SUBTREE_SIBLING_PATH_LENGTH, CONTRACT_TREE_HEIGHT, CONTRACT_SUBTREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, KERNELS_PER_BASE_ROLLUP, MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP, @@ -554,9 +554,10 @@ mod tests { NULLIFIER_TREE_HEIGHT, NULLIFIER_SUBTREE_HEIGHT, PUBLIC_DATA_TREE_HEIGHT, + NUM_FIELDS_PER_SHA256, }; use dep::types::{ - abis::membership_witness::BlocksTreeRootMembershipWitness, + abis::membership_witness::ArchiveRootMembershipWitness, abis::membership_witness::NullifierMembershipWitness, abis::new_contract_data::NewContractData, abis::public_data_read::PublicDataRead, @@ -731,13 +732,13 @@ mod tests { let mut start_public_data_tree = NonEmptyMerkleTree::new(self.pre_existing_public_data, [0; PUBLIC_DATA_TREE_HEIGHT], [0; PUBLIC_DATA_TREE_HEIGHT - 5], [0; 5]); let start_public_data_tree_root = start_public_data_tree.get_root(); - let start_blocks_tree = NonEmptyMerkleTree::new(self.pre_existing_blocks, [0; BLOCKS_TREE_HEIGHT], [0; BLOCKS_TREE_HEIGHT - 1], [0; 1]); - let start_blocks_tree_snapshot = AppendOnlyTreeSnapshot { - root: start_blocks_tree.get_root(), - next_available_leaf_index: start_blocks_tree.get_next_available_index() as u32, + let start_archive = NonEmptyMerkleTree::new(self.pre_existing_blocks, [0; ARCHIVE_HEIGHT], [0; ARCHIVE_HEIGHT - 1], [0; 1]); + let archive_snapshot = AppendOnlyTreeSnapshot { + root: start_archive.get_root(), + next_available_leaf_index: start_archive.get_next_available_index() as u32, }; - self.constants.start_blocks_tree_snapshot = start_blocks_tree_snapshot; + self.constants.archive_snapshot = archive_snapshot; let mut new_public_data_reads_sibling_paths: [[Field; PUBLIC_DATA_TREE_HEIGHT]; MAX_PUBLIC_DATA_READS_PER_BASE_ROLLUP] = dep::std::unsafe::zeroed(); @@ -786,7 +787,7 @@ mod tests { start_nullifier_tree_snapshot, start_contract_tree_snapshot, start_public_data_tree_root, - start_blocks_tree_snapshot, + archive_snapshot, sorted_new_nullifiers, sorted_new_nullifiers_indexes, @@ -800,14 +801,14 @@ mod tests { new_public_data_update_requests_sibling_paths, new_public_data_reads_sibling_paths, - blocks_tree_root_membership_witnesses: [ - BlocksTreeRootMembershipWitness { + archive_root_membership_witnesses: [ + ArchiveRootMembershipWitness { leaf_index: 0, - sibling_path: start_blocks_tree.get_sibling_path(0) + sibling_path: start_archive.get_sibling_path(0) }, - BlocksTreeRootMembershipWitness { + ArchiveRootMembershipWitness { leaf_index: 1, - sibling_path: start_blocks_tree.get_sibling_path(1) + sibling_path: start_archive.get_sibling_path(1) }, ], @@ -1144,10 +1145,10 @@ mod tests { } #[test(should_fail_with = "membership check failed")] - unconstrained fn compute_membership_blocks_tree_negative() { + unconstrained fn compute_membership_archive_negative() { let mut inputs = BaseRollupInputsBuilder::new().build_inputs(); - inputs.blocks_tree_root_membership_witnesses[0].sibling_path[0] = 27; + inputs.archive_root_membership_witnesses[0].sibling_path[0] = 27; let _output = inputs.base_rollup_circuit(); } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr index 4eb6d8bffd8..2a67a1acd96 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr @@ -2,7 +2,7 @@ use 
crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInpu use dep::types::mocked::AggregationObject; use dep::types::hash::{accumulate_sha256, assert_check_membership, root_from_sibling_path}; use dep::types::utils::uint128::U128; -use dep::aztec::constants_gen::NUM_FIELDS_PER_SHA256; +use dep::types::constants::NUM_FIELDS_PER_SHA256; use crate::abis::previous_rollup_data::PreviousRollupData; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr index c024f1695e2..55f7d242e7e 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/hash.nr @@ -1,5 +1,5 @@ use crate::abis::global_variables::GlobalVariables; -use dep::aztec::constants_gen; +use dep::types::constants::GENERATOR_INDEX__BLOCK_HASH; pub fn compute_block_hash_with_globals( globals : GlobalVariables, @@ -11,5 +11,5 @@ pub fn compute_block_hash_with_globals( let inputs = [globals.hash(), note_hash_tree_root, nullifier_tree_root, contract_tree_root, l1_to_l2_messages_tree_root, public_data_tree_root]; - dep::std::hash::pedersen_hash_with_separator(inputs, constants_gen::GENERATOR_INDEX__BLOCK_HASH) + dep::std::hash::pedersen_hash_with_separator(inputs, GENERATOR_INDEX__BLOCK_HASH) } \ No newline at end of file diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr index d6052cee5e2..a2e29866aca 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr @@ -4,7 +4,7 @@ mod root_rollup_public_inputs; use root_rollup_public_inputs::RootRollupPublicInputs; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use dep::types::utils::uint256::U256; -use dep::aztec::constants_gen::{NUM_FIELDS_PER_SHA256,NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,L1_TO_L2_MSG_SUBTREE_HEIGHT}; +use dep::types::constants::{NUM_FIELDS_PER_SHA256,NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP,L1_TO_L2_MSG_SUBTREE_HEIGHT}; use crate::{components, hash::compute_block_hash_with_globals}; use crate::merkle_tree::{calculate_subtree, calculate_empty_tree_root}; @@ -37,7 +37,7 @@ impl RootRollupInputs { ); // Build the block hash for this iteration from the tree roots and global variables - // Then insert the block into the blocks tree + // Then insert the block into the archive tree let block_hash = compute_block_hash_with_globals(left.constants.global_variables, right.end_note_hash_tree_snapshot.root, right.end_nullifier_tree_snapshot.root, @@ -45,10 +45,10 @@ impl RootRollupInputs { new_l1_to_l2_messages_tree_snapshot.root, right.end_public_data_tree_root); - // Update the blocks tree - let end_blocks_tree_snapshot = components::insert_subtree_to_snapshot_tree( - self.start_blocks_tree_snapshot, - self.new_blocks_tree_sibling_path, + // Update the archive + let end_archive_snapshot = components::insert_subtree_to_snapshot_tree( + self.start_archive_snapshot, + self.new_archive_sibling_path, 0, block_hash, 0 @@ -72,19 +72,10 @@ impl RootRollupInputs { end_public_data_tree_root : right.end_public_data_tree_root, start_l1_to_l2_messages_tree_snapshot : self.start_l1_to_l2_messages_tree_snapshot, end_l1_to_l2_messages_tree_snapshot : new_l1_to_l2_messages_tree_snapshot, - start_blocks_tree_snapshot : 
self.start_blocks_tree_snapshot, - end_blocks_tree_snapshot : end_blocks_tree_snapshot, + start_archive_snapshot : self.start_archive_snapshot, + end_archive_snapshot : end_archive_snapshot, calldata_hash : components::compute_calldata_hash(self.previous_rollup_data), l1_to_l2_messages_hash : compute_messages_hash(self.new_l1_to_l2_messages), - - // The cpp code was just not initializing these, so they would be zeroed out - // TODO(Lasse/Jean): add explanation for this. - end_tree_of_historical_contract_tree_roots_snapshot : zeroed_out_snapshot, - end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot : zeroed_out_snapshot, - end_tree_of_historical_note_hash_tree_roots_snapshot : zeroed_out_snapshot, - start_tree_of_historical_contract_tree_roots_snapshot : zeroed_out_snapshot, - start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot : zeroed_out_snapshot, - start_tree_of_historical_note_hash_tree_roots_snapshot : zeroed_out_snapshot, } } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr index 5c2fa4075ff..37f64928e8b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_inputs.nr @@ -3,10 +3,10 @@ use crate::abis::previous_rollup_data::PreviousRollupData; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::abis::constant_rollup_data::ConstantRollupData; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ NUMBER_OF_L1_L2_MESSAGES_PER_ROLLUP, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, - BLOCKS_TREE_HEIGHT + ARCHIVE_HEIGHT }; struct RootRollupInputs { @@ -20,6 +20,6 @@ struct RootRollupInputs { start_l1_to_l2_messages_tree_snapshot : AppendOnlyTreeSnapshot, // inputs required to add the block hash - start_blocks_tree_snapshot : AppendOnlyTreeSnapshot, - new_blocks_tree_sibling_path : [Field; BLOCKS_TREE_HEIGHT], + start_archive_snapshot : AppendOnlyTreeSnapshot, + new_archive_sibling_path : [Field; ARCHIVE_HEIGHT], } \ No newline at end of file diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr index 9b15ca2955f..c4c2f931d8b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root/root_rollup_public_inputs.nr @@ -2,7 +2,7 @@ use crate::abis::nullifier_leaf_preimage::NullifierLeafPreimage; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::abis::constant_rollup_data::ConstantRollupData; use crate::abis::global_variables::GlobalVariables; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ NUM_FIELDS_PER_SHA256 }; use dep::types::mocked::AggregationObject; @@ -26,20 +26,11 @@ struct RootRollupPublicInputs { start_public_data_tree_root : Field, end_public_data_tree_root : Field, - start_tree_of_historical_note_hash_tree_roots_snapshot : AppendOnlyTreeSnapshot, - end_tree_of_historical_note_hash_tree_roots_snapshot : AppendOnlyTreeSnapshot, - - start_tree_of_historical_contract_tree_roots_snapshot : AppendOnlyTreeSnapshot, - end_tree_of_historical_contract_tree_roots_snapshot : AppendOnlyTreeSnapshot, - start_l1_to_l2_messages_tree_snapshot : 
AppendOnlyTreeSnapshot, end_l1_to_l2_messages_tree_snapshot : AppendOnlyTreeSnapshot, - start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot : AppendOnlyTreeSnapshot, - end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot : AppendOnlyTreeSnapshot, - - start_blocks_tree_snapshot : AppendOnlyTreeSnapshot, - end_blocks_tree_snapshot : AppendOnlyTreeSnapshot, + start_archive_snapshot : AppendOnlyTreeSnapshot, + end_archive_snapshot : AppendOnlyTreeSnapshot, calldata_hash : [Field; NUM_FIELDS_PER_SHA256], l1_to_l2_messages_hash : [Field; NUM_FIELDS_PER_SHA256], diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr index 7da05d2df6d..0a095423bc6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/tests/root_rollup_inputs.nr @@ -3,11 +3,11 @@ use crate::{ root_rollup_inputs::RootRollupInputs, }, }; -use dep::aztec::constants_gen::{ +use dep::types::constants::{ L1_TO_L2_MSG_TREE_HEIGHT, L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH, L1_TO_L2_MSG_SUBTREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, }; use crate::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; use crate::tests::previous_rollup_data::default_previous_rollup_data; @@ -25,10 +25,10 @@ pub fn compute_l1_l2_empty_snapshot() -> (AppendOnlyTreeSnapshot, [Field; L1_TO_ (AppendOnlyTreeSnapshot{ root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, new_l1_to_l2_messages_tree_root_sibling_path) } -pub fn compute_blocks_tree_snapshot() -> (AppendOnlyTreeSnapshot, [Field; BLOCKS_TREE_HEIGHT]) { - let zero_hashes = compute_zero_hashes([0; BLOCKS_TREE_HEIGHT]); - let mut sibling_path = [0; BLOCKS_TREE_HEIGHT]; - for i in 1..BLOCKS_TREE_HEIGHT { +pub fn compute_archive_snapshot() -> (AppendOnlyTreeSnapshot, [Field; ARCHIVE_HEIGHT]) { + let zero_hashes = compute_zero_hashes([0; ARCHIVE_HEIGHT]); + let mut sibling_path = [0; ARCHIVE_HEIGHT]; + for i in 1..ARCHIVE_HEIGHT { sibling_path[i] = zero_hashes[i-1]; } (AppendOnlyTreeSnapshot { root: zero_hashes[zero_hashes.len() - 1], next_available_leaf_index: 0 }, sibling_path) @@ -42,10 +42,10 @@ pub fn default_root_rollup_inputs() -> RootRollupInputs { inputs.new_l1_to_l2_messages_tree_root_sibling_path = l1_l2_empty_sibling_path; inputs.start_l1_to_l2_messages_tree_snapshot = l1_l2_empty_snapshot; - let (blocks_snapshot, blocks_sibling_path) = compute_blocks_tree_snapshot(); + let (blocks_snapshot, blocks_sibling_path) = compute_archive_snapshot(); - inputs.start_blocks_tree_snapshot = blocks_snapshot; - inputs.new_blocks_tree_sibling_path = blocks_sibling_path; + inputs.start_archive_snapshot = blocks_snapshot; + inputs.new_archive_sibling_path = blocks_sibling_path; inputs.previous_rollup_data = default_previous_rollup_data(); diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml b/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml index 5a7509f4318..6c8b6657f62 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml +++ b/yarn-project/noir-protocol-circuits/src/crates/types/Nargo.toml @@ -5,4 +5,3 @@ authors = [""] compiler_version = ">=0.18.0" [dependencies] -aztec = { path = "../../../../aztec-nr/aztec" } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr 
b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr index 9616da0aa8c..390c59c94aa 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/block_header.nr @@ -1,7 +1,7 @@ use crate::block::Block; struct BlockHeader { - blocks_tree_root : Field, + archive_root : Field, block : Block, // Private data // This is marked in the cpp code as an enhancement @@ -24,7 +24,7 @@ impl BlockHeader { self.block.nullifier_tree_root, self.block.contract_tree_root, self.block.l1_to_l2_messages_tree_root, - self.blocks_tree_root, + self.archive_root, self.block.public_data_tree_root, self.block.global_variables_hash ] diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr index 37259a7e675..012ab922e5d 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_context.nr @@ -1,6 +1,6 @@ use crate::abis::function_selector::FunctionSelector; use crate::address::{EthAddress,Address}; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__CALL_CONTEXT; struct CallContext{ msg_sender : Address, @@ -24,7 +24,7 @@ impl CallContext { self.is_delegate_call as Field, self.is_static_call as Field, self.is_contract_deployment as Field, - ], constants_gen::GENERATOR_INDEX__CALL_CONTEXT) + ], GENERATOR_INDEX__CALL_CONTEXT) } fn assert_is_zero(self) { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr index fc6a754c50d..ea323fba8fe 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/call_stack_item.nr @@ -4,7 +4,7 @@ use crate::abis::{ public_circuit_public_inputs::PublicCircuitPublicInputs, }; use crate::address::Address; -use dep::aztec::constants_gen::{ +use crate::constants::{ GENERATOR_INDEX__CALL_STACK_ITEM, }; diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr index 6d39c700d92..e48dc871220 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/combined_accumulated_data.nr @@ -10,7 +10,7 @@ use crate::{ mocked::AggregationObject, utils::bounded_vec::BoundedVec, }; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_TX, MAX_NEW_COMMITMENTS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr index ad812b626a9..438ae6c61c6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_data.nr @@ -1,5 +1,5 @@ use crate::abis::function_selector::FunctionSelector; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__FUNCTION_DATA; struct FunctionData { // First four bytes of the abi encoding @@ -21,6 +21,6 @@ impl FunctionData { self.is_internal as Field, 
self.is_private as Field, self.is_constructor as Field, - ], constants_gen::GENERATOR_INDEX__FUNCTION_DATA) + ], GENERATOR_INDEX__FUNCTION_DATA) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr index 022ec5a718e..2a66a161736 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/function_leaf_preimage.nr @@ -1,5 +1,5 @@ use crate::abis::function_selector::FunctionSelector; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__FUNCTION_LEAF; struct FunctionLeafPreimage { selector : FunctionSelector, @@ -17,6 +17,6 @@ impl FunctionLeafPreimage { self.is_private as Field, self.vk_hash, self.acir_hash - ], constants_gen::GENERATOR_INDEX__FUNCTION_LEAF) + ], GENERATOR_INDEX__FUNCTION_LEAF) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr index 6b4c29dcb71..efead3d78a2 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/membership_witness.nr @@ -1,11 +1,11 @@ -use dep::aztec::constants_gen::{ +use crate::constants::{ CONTRACT_TREE_HEIGHT, FUNCTION_TREE_HEIGHT, KERNELS_PER_BASE_ROLLUP, NULLIFIER_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT, ROLLUP_VK_TREE_HEIGHT, - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, }; struct MembershipWitness { @@ -37,9 +37,9 @@ struct NullifierMembershipWitness{ sibling_path: [Field; NULLIFIER_TREE_HEIGHT] } -struct BlocksTreeRootMembershipWitness{ +struct ArchiveRootMembershipWitness{ leaf_index: Field, - sibling_path: [Field; BLOCKS_TREE_HEIGHT] + sibling_path: [Field; ARCHIVE_HEIGHT] } struct ReadRequestMembershipWitness { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr index aba0cda1789..784b698da5b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/new_contract_data.nr @@ -1,5 +1,5 @@ use crate::address::{Address, EthAddress}; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__CONTRACT_LEAF; struct NewContractData { contract_address: Address, @@ -36,7 +36,7 @@ impl NewContractData { self.contract_address.to_field(), self.portal_contract_address.to_field(), self.function_tree_root, - ], constants_gen::GENERATOR_INDEX__CONTRACT_LEAF) + ], GENERATOR_INDEX__CONTRACT_LEAF) } } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr index d65dc83a883..5733acef236 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/previous_kernel_data.nr @@ -1,5 +1,5 @@ use crate::mocked::{Proof, VerificationKey}; -use dep::aztec::constants_gen::VK_TREE_HEIGHT; +use crate::constants::VK_TREE_HEIGHT; use crate::abis::kernel_circuit_public_inputs::KernelCircuitPublicInputs; //TODO(sean): left a note saying that this should not be called Previous diff --git 
a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr index b13e029a926..8bee165fda4 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_circuit_public_inputs.nr @@ -7,9 +7,7 @@ use crate::{ hash::NUM_FIELDS_PER_SHA256, utils::bounded_vec::BoundedVec, }; -use dep::aztec::{ - constants_gen, - constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_CALL, MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, @@ -18,7 +16,8 @@ use dep::aztec::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, RETURN_VALUES_LENGTH, - } + PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, + GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS, }; struct PrivateCircuitPublicInputs { @@ -57,7 +56,7 @@ struct PrivateCircuitPublicInputs { impl PrivateCircuitPublicInputs { // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3059) : Reuse aztec-nr fn hash(self) -> Field { - let mut fields: BoundedVec = BoundedVec::new(0); + let mut fields: BoundedVec = BoundedVec::new(0); fields.push(self.call_context.hash()); fields.push(self.args_hash); fields.push_array(self.return_values); @@ -78,8 +77,8 @@ impl PrivateCircuitPublicInputs { fields.push(self.chain_id); fields.push(self.version); - assert_eq(fields.len(), constants_gen::PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PrivateCircuitPublicInputs"); + assert_eq(fields.len(), PRIVATE_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PrivateCircuitPublicInputs"); - dep::std::hash::pedersen_hash_with_separator(fields.storage, constants_gen::GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS) + dep::std::hash::pedersen_hash_with_separator(fields.storage, GENERATOR_INDEX__PRIVATE_CIRCUIT_PUBLIC_INPUTS) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr index 2ec38255474..23414ce6530 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/private_kernel/private_call_data.nr @@ -9,7 +9,7 @@ use crate::abis::{ }; use crate::address::EthAddress; use crate::mocked::{Proof, VerificationKey}; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_READ_REQUESTS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr index 6310f119440..bc59e74e109 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_call_data.nr @@ -6,7 +6,7 @@ use crate::{ address::EthAddress, mocked::Proof, }; -use dep::aztec::constants_gen::MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL; +use crate::constants::MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL; struct PublicCallData { call_stack_item: PublicCallStackItem, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr 
b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr index abcaf60e7af..69b6ab7af68 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_circuit_public_inputs.nr @@ -1,5 +1,4 @@ -use dep::aztec::constants_gen; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, MAX_NEW_COMMITMENTS_PER_CALL, @@ -7,6 +6,8 @@ use dep::aztec::constants_gen::{ MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, RETURN_VALUES_LENGTH, + GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS, + PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH }; use crate::{ abis::{ @@ -50,7 +51,7 @@ struct PublicCircuitPublicInputs{ impl PublicCircuitPublicInputs{ // TODO(https://github.com/AztecProtocol/aztec-packages/issues/3059): Reuse aztec-nr fn hash(self) -> Field { - let mut inputs: BoundedVec = BoundedVec::new(0); + let mut inputs: BoundedVec = BoundedVec::new(0); inputs.push(self.call_context.hash()); inputs.push(self.args_hash); inputs.push_array(self.return_values); @@ -69,8 +70,8 @@ impl PublicCircuitPublicInputs{ inputs.push_array(self.block_header.to_array()); inputs.push(self.prover_address.to_field()); - assert_eq(inputs.len(), constants_gen::PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PublicCircuitPublicInputs"); + assert_eq(inputs.len(), PUBLIC_CIRCUIT_PUBLIC_INPUTS_HASH_INPUT_LENGTH, "Incorrect number of input fields when hashing PublicCircuitPublicInputs"); - dep::std::hash::pedersen_hash_with_separator(inputs.storage, constants_gen::GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS) + dep::std::hash::pedersen_hash_with_separator(inputs.storage, GENERATOR_INDEX__PUBLIC_CIRCUIT_PUBLIC_INPUTS) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr index 94c8a02d087..0fefc927168 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_read.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; struct PublicDataRead { leaf_index : Field, @@ -10,7 +10,7 @@ impl PublicDataRead { dep::std::hash::pedersen_hash_with_separator([ self.leaf_index, self.value, - ], constants_gen::GENERATOR_INDEX__PUBLIC_DATA_READ) + ], GENERATOR_INDEX__PUBLIC_DATA_READ) } pub fn empty() -> Self { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr index 458d6ec9549..2e6c51ccaa7 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/abis/public_data_update_request.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; struct PublicDataUpdateRequest { leaf_index : Field, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr index dc15b932bdd..a692491d3f4 100644 --- 
a/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/block.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__BLOCK_HASH; // This is not in the cpp code. struct Block { @@ -31,6 +31,6 @@ impl Block { self.contract_tree_root, self.l1_to_l2_messages_tree_root, self.public_data_tree_root, - ], constants_gen::GENERATOR_INDEX__BLOCK_HASH) + ], GENERATOR_INDEX__BLOCK_HASH) } } \ No newline at end of file diff --git a/yarn-project/aztec-nr/aztec/src/constants_gen.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr similarity index 98% rename from yarn-project/aztec-nr/aztec/src/constants_gen.nr rename to yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr index d1d77e5b81c..f8a5c60bb62 100644 --- a/yarn-project/aztec-nr/aztec/src/constants_gen.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/constants.nr @@ -73,7 +73,7 @@ global CONTRACT_SUBTREE_SIBLING_PATH_LENGTH: Field = 15; global NOTE_HASH_SUBTREE_HEIGHT: Field = 7; global NOTE_HASH_SUBTREE_SIBLING_PATH_LENGTH: Field = 25; global NULLIFIER_SUBTREE_HEIGHT: Field = 7; -global BLOCKS_TREE_HEIGHT: Field = 16; +global ARCHIVE_HEIGHT: Field = 16; global NULLIFIER_SUBTREE_SIBLING_PATH_LENGTH: Field = 13; global L1_TO_L2_MSG_SUBTREE_HEIGHT: Field = 4; global L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH: Field = 12; @@ -82,6 +82,9 @@ global L1_TO_L2_MSG_SUBTREE_SIBLING_PATH_LENGTH: Field = 12; global FUNCTION_SELECTOR_NUM_BYTES: Field = 4; global MAPPING_SLOT_PEDERSEN_SEPARATOR: Field = 4; global NUM_FIELDS_PER_SHA256: Field = 2; +global ARGS_HASH_CHUNK_LENGTH: u32 = 32; +global ARGS_HASH_CHUNK_COUNT: u32 = 16; + // NOIR CONSTANTS - constants used only in yarn-packages/noir-contracts // Some are defined here because Noir doesn't yet support globals referencing other globals yet. 
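The rename above is the pivot of this refactor: constants_gen.nr moves out of aztec-nr and becomes types/src/constants.nr, which is what lets every protocol-circuit crate drop its aztec dependency and import from crate::constants (inside the types crate) or dep::types::constants (everywhere else). Besides the BLOCKS_TREE_HEIGHT -> ARCHIVE_HEIGHT rename, the file gains ARGS_HASH_CHUNK_LENGTH (32) and ARGS_HASH_CHUNK_COUNT (16) to back the hash_args helper added to types/src/hash.nr below: the arguments are packed into 16 chunks of 32 fields each (so at most 16 * 32 = 512 arguments), each chunk is Pedersen-hashed with GENERATOR_INDEX__FUNCTION_ARGS, and the 16 chunk hashes are hashed once more under the same separator. A minimal sketch of the relocated helpers, assuming a caller crate that lists types = { path = "../types" } in its Nargo.toml as the kernel crates do:

#[test]
fn smoke_test_relocated_hash_helpers() {
    // hash_args zero-pads [1, 2, 3] into the first ARGS_HASH_CHUNK_LENGTH-field
    // chunk; the remaining fifteen chunk hashes stay 0 before the final hash.
    let args_hash = dep::types::hash::hash_args([1, 2, 3]);
    assert(args_hash != 0);

    // sha256_to_field packs the 32-byte digest into a single field element as
    // high * 2^128 + low, matching the new implementation shown in hash.nr.
    let digest_as_field = dep::types::hash::sha256_to_field([0, 1, 2, 3]);
    assert(digest_as_field != 0);
}
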
diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr index d43f3d5ab2c..083122ff942 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/deployment_data.nr @@ -1,6 +1,6 @@ use crate::address::EthAddress; use crate::point::Point; -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA; struct ContractDeploymentData { deployer_public_key : Point, @@ -27,6 +27,6 @@ impl ContractDeploymentData { self.function_tree_root, self.contract_address_salt, self.portal_contract_address.to_field() - ], constants_gen::GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA) + ], GENERATOR_INDEX__CONTRACT_DEPLOYMENT_DATA) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr index cc7911471f1..71de424e162 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_read.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_READ; struct StorageRead{ storage_slot: Field, @@ -17,7 +17,7 @@ impl StorageRead { dep::std::hash::pedersen_hash_with_separator([ self.storage_slot, self.current_value, - ], constants_gen::GENERATOR_INDEX__PUBLIC_DATA_READ) + ], GENERATOR_INDEX__PUBLIC_DATA_READ) } pub fn is_empty(self) -> bool { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr index ba33f3985ba..7d1f90b0d89 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/contrakt/storage_update_request.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; +use crate::constants::GENERATOR_INDEX__PUBLIC_DATA_UPDATE_REQUEST; struct StorageUpdateRequest{ storage_slot : Field, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr index a331b8046be..a792107a9cd 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr @@ -8,13 +8,70 @@ use crate::abis::function_data::FunctionData; use crate::utils::uint128::U128; use crate::utils::uint256::U256; use crate::utils::bounded_vec::BoundedVec; - -use dep::aztec::{ - constants_gen, - constants_gen::{CONTRACT_TREE_HEIGHT, FUNCTION_TREE_HEIGHT, NOTE_HASH_TREE_HEIGHT}, - hash::sha256_to_field, +use crate::constants::{ + ARGS_HASH_CHUNK_COUNT, + ARGS_HASH_CHUNK_LENGTH, + CONTRACT_TREE_HEIGHT, + FUNCTION_TREE_HEIGHT, + NOTE_HASH_TREE_HEIGHT, + GENERATOR_INDEX__SILOED_COMMITMENT, + GENERATOR_INDEX__OUTER_NULLIFIER, + GENERATOR_INDEX__VK, + GENERATOR_INDEX__CONSTRUCTOR, + GENERATOR_INDEX__PARTIAL_ADDRESS, + GENERATOR_INDEX__CONTRACT_ADDRESS, + GENERATOR_INDEX__COMMITMENT_NONCE, + GENERATOR_INDEX__UNIQUE_COMMITMENT, + GENERATOR_INDEX__FUNCTION_ARGS, }; +use dep::std::hash::sha256; + +pub fn sha256_to_field(bytes_to_hash: [u8; N]) -> Field { + let sha256_hashed = 
sha256(bytes_to_hash); + + // Convert it to a field element + let mut v = 1; + let mut high = 0 as Field; + let mut low = 0 as Field; + + for i in 0..16 { + high = high + (sha256_hashed[15 - i] as Field) * v; + low = low + (sha256_hashed[16 + 15 - i] as Field) * v; + v = v * 256; + } + + // Abuse that a % p + b % p = (a + b) % p and that low < p + let hash_in_a_field = low + high * v; + + hash_in_a_field +} + +pub fn hash_args(args: [Field; N]) -> Field { + if args.len() == 0 { + 0 + } else { + let mut chunks_hashes = [0; ARGS_HASH_CHUNK_COUNT]; + for i in 0..ARGS_HASH_CHUNK_COUNT { + let mut chunk_hash = 0; + let start_chunk_index = i * ARGS_HASH_CHUNK_LENGTH; + if start_chunk_index < (args.len() as u32) { + let mut chunk_args = [0; ARGS_HASH_CHUNK_LENGTH]; + for j in 0..ARGS_HASH_CHUNK_LENGTH { + let item_index = i * ARGS_HASH_CHUNK_LENGTH + j; + if item_index < (args.len() as u32) { + chunk_args[j] = args[item_index]; + } + } + chunk_hash = dep::std::hash::pedersen_hash_with_separator(chunk_args, GENERATOR_INDEX__FUNCTION_ARGS); + } + chunks_hashes[i] = chunk_hash; + } + dep::std::hash::pedersen_hash_with_separator(chunks_hashes, GENERATOR_INDEX__FUNCTION_ARGS) + } +} + + // Checks that `value` is a member of a merkle tree with root `root` at position `index` // The witness being the `sibling_path` pub fn assert_check_membership(value : Field, index : Field, sibling_path : [Field; N], root : Field) { @@ -94,14 +151,14 @@ pub fn silo_commitment(address : Address, inner_commitment : Field) -> Field { dep::std::hash::pedersen_hash_with_separator([ address.to_field(), inner_commitment, - ], constants_gen::GENERATOR_INDEX__SILOED_COMMITMENT) + ], GENERATOR_INDEX__SILOED_COMMITMENT) } pub fn silo_nullifier(address : Address, nullifier : Field) -> Field { dep::std::hash::pedersen_hash_with_separator([ address.to_field(), nullifier, - ], constants_gen::GENERATOR_INDEX__OUTER_NULLIFIER) + ], GENERATOR_INDEX__OUTER_NULLIFIER) } fn merkle_hash(left : Field, right : Field) -> Field { @@ -112,7 +169,7 @@ pub fn stdlib_recursion_verification_key_compress_native_vk(_vk : VerificationKe // Original cpp code // stdlib::recursion::verification_key::compress_native(private_call.vk, GeneratorIndex::VK); // The above cpp method is only ever called on verification key, so it has been special cased here - let _hash_index = constants_gen::GENERATOR_INDEX__VK; + let _hash_index = GENERATOR_INDEX__VK; 0 } @@ -143,7 +200,7 @@ pub fn compute_constructor_hash(function_data : FunctionData, args_hash : Field, function_data_hash, args_hash, constructor_vk_hash - ], constants_gen::GENERATOR_INDEX__CONSTRUCTOR) + ], GENERATOR_INDEX__CONSTRUCTOR) } // sha256 hash is stored in two fields to accommodate all 256-bits of the hash @@ -201,7 +258,7 @@ pub fn compute_partial_address(contract_address_salt : Field, function_tree_root contract_address_salt, function_tree_root, constructor_hash - ],constants_gen::GENERATOR_INDEX__PARTIAL_ADDRESS) + ], GENERATOR_INDEX__PARTIAL_ADDRESS) } pub fn compute_contract_address_from_partial(point : Point, partial_address : Field) -> Address { @@ -209,7 +266,7 @@ pub fn compute_contract_address_from_partial(point : Point, partial_address : Fi point.x, point.y, partial_address - ],constants_gen::GENERATOR_INDEX__CONTRACT_ADDRESS); + ], GENERATOR_INDEX__CONTRACT_ADDRESS); Address::from_field(field) } @@ -217,14 +274,14 @@ pub fn compute_commitment_nonce(first_nullifier : Field, commitment_index : Fiel dep::std::hash::pedersen_hash_with_separator([ first_nullifier, commitment_index - ], 
constants_gen::GENERATOR_INDEX__COMMITMENT_NONCE) + ], GENERATOR_INDEX__COMMITMENT_NONCE) } pub fn compute_unique_siloed_commitment(nonce: Field, siloed_commitment: Field) -> Field { dep::std::hash::pedersen_hash_with_separator([ nonce, siloed_commitment - ], constants_gen::GENERATOR_INDEX__UNIQUE_COMMITMENT) + ], GENERATOR_INDEX__UNIQUE_COMMITMENT) } pub fn compute_unique_siloed_commitments(first_nullifier: Field, siloed_commitments: [Field; N]) -> [Field; N] { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr index 16c2115161e..d3140b40e9b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/interop_testing.nr @@ -7,9 +7,8 @@ use crate::abis::function_data::FunctionData; use crate::abis::function_leaf_preimage::FunctionLeafPreimage; use crate::contrakt::deployment_data::ContractDeploymentData; use crate::abis::function_selector::FunctionSelector; -use crate::hash::compute_l2_to_l1_hash; +use crate::hash::{compute_l2_to_l1_hash, sha256_to_field}; use crate::abis::call_stack_item::PublicCallStackItem; -use dep::aztec::hash::sha256_to_field; use crate::abis::public_circuit_public_inputs::PublicCircuitPublicInputs; #[test] diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr index 8a33e4f28eb..90c1376598d 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/lib.nr @@ -8,6 +8,7 @@ mod contrakt; mod transaction; mod abis; mod block; +mod constants; mod mocked; mod hash; diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr index fca591a7fa6..89c5da01d76 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures.nr @@ -15,7 +15,7 @@ global MSG_SENDER = Address { inner: 27 }; global DEPLOYER_PUBLIC_KEY = Point { x: 123456789, y: 123456789 }; global BLOCK_HEADER = BlockHeader { - blocks_tree_root: 0, + archive_root: 0, block: Block { note_hash_tree_root: fixtures::note_hash_tree::ROOT, nullifier_tree_root: 0, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr index 446fa2e5e70..97a57f44976 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/fixtures/read_requests.nr @@ -1,10 +1,8 @@ use crate::abis::membership_witness::ReadRequestMembershipWitness; use crate::tests::fixtures; use crate::utils::bounded_vec::BoundedVec; -use dep::aztec::{ - constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_CALL, - }, }; pub fn generate_read_requests(how_many: Field) -> ( diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr index d41008a7ad4..9c4c26dffc6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr +++ 
b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/previous_kernel_data_builder.nr @@ -19,15 +19,13 @@ use crate::{ }, transaction::context::TxContext, }; -use dep::aztec::{ - constants_gen::{ +use crate::constants::{ EMPTY_NULLIFIED_COMMITMENT, MAX_NEW_COMMITMENTS_PER_TX, MAX_NEW_NULLIFIERS_PER_TX, MAX_PUBLIC_DATA_READS_PER_TX, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_TX, VK_TREE_HEIGHT, - }, }; struct PreviousKernelDataBuilder { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr index b03552e2df6..0cb9b217f54 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_call_data_builder.nr @@ -26,12 +26,10 @@ use crate::{ bounded_vec::BoundedVec, }, }; -use dep::aztec::{ - constants_gen::{ +use crate::constants::{ MAX_PRIVATE_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_READ_REQUESTS_PER_CALL, - }, }; struct PrivateCallDataBuilder { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr index f000639f3df..e412af64d98 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/private_circuit_public_inputs_builder.nr @@ -6,16 +6,14 @@ use crate::{ private_circuit_public_inputs::PrivateCircuitPublicInputs, }, contrakt::deployment_data::ContractDeploymentData, - hash::{compute_constructor_hash, NUM_FIELDS_PER_SHA256}, + hash::{compute_constructor_hash, NUM_FIELDS_PER_SHA256, hash_args}, tests::{ fixtures, testing_harness::build_contract_deployment_data, }, utils::bounded_vec::BoundedVec, }; -use dep::aztec::{ - abi::hash_args, - constants_gen::{ +use crate::constants::{ MAX_READ_REQUESTS_PER_CALL, MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, @@ -24,7 +22,6 @@ use dep::aztec::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, RETURN_VALUES_LENGTH, - } }; struct PrivateCircuitPublicInputsBuilder { diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr index fdf4678b161..01ee980b65b 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_call_data_builder.nr @@ -19,7 +19,7 @@ use crate::{ }, utils::bounded_vec::BoundedVec, }; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_PUBLIC_CALL_STACK_LENGTH_PER_CALL, MAX_PUBLIC_DATA_READS_PER_CALL, MAX_PUBLIC_DATA_UPDATE_REQUESTS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr index 7f1de0beee6..34d3df007c4 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/tests/public_circuit_public_inputs_builder.nr @@ -13,7 +13,7 @@ use crate::{ tests::fixtures, 
utils::bounded_vec::BoundedVec, }; -use dep::aztec::constants_gen::{ +use crate::constants::{ MAX_NEW_COMMITMENTS_PER_CALL, MAX_NEW_L2_TO_L1_MSGS_PER_CALL, MAX_NEW_NULLIFIERS_PER_CALL, diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr index 3032020b61b..3715951ca78 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/context.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__TX_CONTEXT; use crate::contrakt::deployment_data::ContractDeploymentData; struct TxContext { @@ -12,7 +12,6 @@ struct TxContext { version : Field, } - impl TxContext { fn hash(self) -> Field { dep::std::hash::pedersen_hash_with_separator([ @@ -22,6 +21,6 @@ impl TxContext { self.contract_deployment_data.hash(), self.chain_id, self.version, - ], constants_gen::GENERATOR_INDEX__TX_CONTEXT) + ], GENERATOR_INDEX__TX_CONTEXT) } } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr index 8bd6a5d963e..89d48366778 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/transaction/request.nr @@ -1,4 +1,4 @@ -use dep::aztec::constants_gen; +use crate::constants::GENERATOR_INDEX__TX_REQUEST; use crate::address::Address; use crate::transaction::context::TxContext; use crate::abis::function_data::FunctionData; @@ -17,6 +17,6 @@ impl TxRequest { self.function_data.hash(), self.args_hash, self.tx_context.hash(), - ], constants_gen::GENERATOR_INDEX__TX_REQUEST) + ], GENERATOR_INDEX__TX_REQUEST) } } diff --git a/yarn-project/noir-protocol-circuits/src/type_conversion.ts b/yarn-project/noir-protocol-circuits/src/type_conversion.ts index b4f5352c688..ebcf5bd5856 100644 --- a/yarn-project/noir-protocol-circuits/src/type_conversion.ts +++ b/yarn-project/noir-protocol-circuits/src/type_conversion.ts @@ -1,8 +1,8 @@ import { + ARCHIVE_HEIGHT, AggregationObject, AppendOnlyTreeSnapshot, AztecAddress, - BLOCKS_TREE_HEIGHT, BaseOrMergeRollupPublicInputs, BaseRollupInputs, BlockHeader, @@ -114,8 +114,8 @@ import { StorageUpdateRequest as StorageUpdateRequestNoir, } from './types/public_kernel_private_previous_types.js'; import { + ArchiveRootMembershipWitness as ArchiveRootMembershipWitnessNoir, BaseRollupInputs as BaseRollupInputsNoir, - BlocksTreeRootMembershipWitness as BlocksTreeRootMembershipWitnessNoir, NullifierLeafPreimage as NullifierLeafPreimageNoir, NullifierMembershipWitness as NullifierMembershipWitnessNoir, } from './types/rollup_base_types.js'; @@ -442,7 +442,7 @@ export function mapCallRequestToNoir(callRequest: CallRequest): CallRequestNoir */ export function mapBlockHeaderToNoir(blockHeader: BlockHeader): BlockHeaderNoir { return { - blocks_tree_root: mapFieldToNoir(blockHeader.blocksTreeRoot), + archive_root: mapFieldToNoir(blockHeader.archiveRoot), block: { note_hash_tree_root: mapFieldToNoir(blockHeader.noteHashTreeRoot), nullifier_tree_root: mapFieldToNoir(blockHeader.nullifierTreeRoot), @@ -466,7 +466,7 @@ export function mapBlockHeaderFromNoir(blockHeader: BlockHeaderNoir): BlockHeade mapFieldFromNoir(blockHeader.block.nullifier_tree_root), mapFieldFromNoir(blockHeader.block.contract_tree_root), 
mapFieldFromNoir(blockHeader.block.l1_to_l2_messages_tree_root), - mapFieldFromNoir(blockHeader.blocks_tree_root), + mapFieldFromNoir(blockHeader.archive_root), mapFieldFromNoir(blockHeader.private_kernel_vk_tree_root), mapFieldFromNoir(blockHeader.block.public_data_tree_root), mapFieldFromNoir(blockHeader.block.global_variables_hash), @@ -1030,7 +1030,7 @@ export function mapGlobalVariablesFromNoir(globalVariables: GlobalVariablesNoir) */ export function mapConstantRollupDataToNoir(constantRollupData: ConstantRollupData): ConstantRollupDataNoir { return { - start_blocks_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(constantRollupData.startBlocksTreeSnapshot), + archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(constantRollupData.archiveSnapshot), private_kernel_vk_tree_root: mapFieldToNoir(constantRollupData.privateKernelVkTreeRoot), public_kernel_vk_tree_root: mapFieldToNoir(constantRollupData.publicKernelVkTreeRoot), base_rollup_vk_hash: mapFieldToNoir(constantRollupData.baseRollupVkHash), @@ -1074,7 +1074,7 @@ export function mapPublicCircuitPublicInputsToNoir( */ export function mapConstantRollupDataFromNoir(constantRollupData: ConstantRollupDataNoir): ConstantRollupData { return new ConstantRollupData( - mapAppendOnlyTreeSnapshotFromNoir(constantRollupData.start_blocks_tree_snapshot), + mapAppendOnlyTreeSnapshotFromNoir(constantRollupData.archive_snapshot), mapFieldFromNoir(constantRollupData.private_kernel_vk_tree_root), mapFieldFromNoir(constantRollupData.public_kernel_vk_tree_root), mapFieldFromNoir(constantRollupData.base_rollup_vk_hash), @@ -1240,8 +1240,8 @@ export function mapRootRollupInputsToNoir(rootRollupInputs: RootRollupInputs): R start_l1_to_l2_messages_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir( rootRollupInputs.startL1ToL2MessagesTreeSnapshot, ), - start_blocks_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(rootRollupInputs.startBlocksTreeSnapshot), - new_blocks_tree_sibling_path: mapTuple(rootRollupInputs.newBlocksTreeSiblingPath, mapFieldToNoir), + start_archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(rootRollupInputs.startArchiveSnapshot), + new_archive_sibling_path: mapTuple(rootRollupInputs.newArchiveSiblingPath, mapFieldToNoir), }; } @@ -1264,20 +1264,10 @@ export function mapRootRollupPublicInputsFromNoir( mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_contract_tree_snapshot), mapFieldFromNoir(rootRollupPublicInputs.start_public_data_tree_root), mapFieldFromNoir(rootRollupPublicInputs.end_public_data_tree_root), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_tree_of_historical_note_hash_tree_roots_snapshot), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_tree_of_historical_note_hash_tree_roots_snapshot), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_tree_of_historical_contract_tree_roots_snapshot), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_tree_of_historical_contract_tree_roots_snapshot), mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_l1_to_l2_messages_tree_snapshot), mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_l1_to_l2_messages_tree_snapshot), - mapAppendOnlyTreeSnapshotFromNoir( - rootRollupPublicInputs.start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot, - ), - mapAppendOnlyTreeSnapshotFromNoir( - rootRollupPublicInputs.end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot, - ), - mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_blocks_tree_snapshot), - 
mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_blocks_tree_snapshot), + mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.start_archive_snapshot), + mapAppendOnlyTreeSnapshotFromNoir(rootRollupPublicInputs.end_archive_snapshot), mapTupleFromNoir(rootRollupPublicInputs.calldata_hash, 2, mapFieldFromNoir), mapTupleFromNoir(rootRollupPublicInputs.l1_to_l2_messages_hash, 2, mapFieldFromNoir), ); @@ -1303,9 +1293,9 @@ export function mapNullifierLeafPreimageToNoir( nullifierLeafPreimage: NullifierLeafPreimage, ): NullifierLeafPreimageNoir { return { - leaf_value: mapFieldToNoir(nullifierLeafPreimage.leafValue), - next_value: mapFieldToNoir(nullifierLeafPreimage.nextValue), - next_index: mapFieldToNoir(new Fr(nullifierLeafPreimage.nextIndex)), + leaf_value: mapFieldToNoir(nullifierLeafPreimage.nullifier), + next_value: mapFieldToNoir(nullifierLeafPreimage.nextNullifier), + next_index: mapNumberToNoir(Number(nullifierLeafPreimage.nextIndex)), }; } @@ -1328,9 +1318,9 @@ export function mapNullifierMembershipWitnessToNoir( * @param membershipWitness - The membership witness. * @returns The noir membership witness. */ -export function mapBlocksTreeRootMembershipWitnessToNoir( - membershipWitness: MembershipWitness, -): BlocksTreeRootMembershipWitnessNoir { +export function mapArchiveRootMembershipWitnessToNoir( + membershipWitness: MembershipWitness, +): ArchiveRootMembershipWitnessNoir { return { leaf_index: membershipWitness.leafIndex.toString(), sibling_path: mapTuple(membershipWitness.siblingPath, mapFieldToNoir), @@ -1349,7 +1339,7 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI start_nullifier_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.startNullifierTreeSnapshot), start_contract_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.startContractTreeSnapshot), start_public_data_tree_root: mapFieldToNoir(inputs.startPublicDataTreeRoot), - start_blocks_tree_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.startBlocksTreeSnapshot), + archive_snapshot: mapAppendOnlyTreeSnapshotToNoir(inputs.archiveSnapshot), sorted_new_nullifiers: mapTuple(inputs.sortedNewNullifiers, mapFieldToNoir), sorted_new_nullifiers_indexes: mapTuple(inputs.sortednewNullifiersIndexes, mapNumberToNoir), low_nullifier_leaf_preimages: mapTuple(inputs.lowNullifierLeafPreimages, mapNullifierLeafPreimageToNoir), @@ -1368,9 +1358,9 @@ export function mapBaseRollupInputsToNoir(inputs: BaseRollupInputs): BaseRollupI inputs.newPublicDataReadsSiblingPaths, (siblingPath: Tuple) => mapTuple(siblingPath, mapFieldToNoir), ), - blocks_tree_root_membership_witnesses: mapTuple( - inputs.blocksTreeRootMembershipWitnesses, - mapBlocksTreeRootMembershipWitnessToNoir, + archive_root_membership_witnesses: mapTuple( + inputs.archiveRootMembershipWitnesses, + mapArchiveRootMembershipWitnessToNoir, ), constants: mapConstantRollupDataToNoir(inputs.constants), }; diff --git a/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts b/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts index d5beb0d9ab4..73ae45036ae 100644 --- a/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/private_kernel_init_types.ts @@ -75,7 +75,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts 
b/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts index 2666c692e66..1c9123613e5 100644 --- a/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/private_kernel_inner_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts b/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts index a84711e7fea..d48a7eb892b 100644 --- a/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/private_kernel_ordering_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts b/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts index 7690aca56f5..2d92f904cb5 100644 --- a/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/public_kernel_private_previous_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts b/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts index 739dce88379..8a974060677 100644 --- a/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/public_kernel_public_previous_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } diff --git a/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts b/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts index 1c880cb2cdf..9fe5308c761 100644 --- a/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/rollup_base_types.ts @@ -97,7 +97,7 @@ export interface Block { } export interface BlockHeader { - blocks_tree_root: Field; + archive_root: Field; block: Block; private_kernel_vk_tree_root: Field; } @@ -163,7 +163,7 @@ export interface NullifierMembershipWitness { sibling_path: FixedLengthArray; } -export interface BlocksTreeRootMembershipWitness { +export interface ArchiveRootMembershipWitness { leaf_index: Field; sibling_path: FixedLengthArray; } @@ -176,7 +176,7 @@ export interface GlobalVariables { } export interface ConstantRollupData { - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; private_kernel_vk_tree_root: Field; public_kernel_vk_tree_root: Field; base_rollup_vk_hash: Field; @@ -190,7 +190,7 @@ export interface BaseRollupInputs { start_nullifier_tree_snapshot: AppendOnlyTreeSnapshot; start_contract_tree_snapshot: AppendOnlyTreeSnapshot; start_public_data_tree_root: Field; - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; 
sorted_new_nullifiers: FixedLengthArray; sorted_new_nullifiers_indexes: FixedLengthArray; low_nullifier_leaf_preimages: FixedLengthArray; @@ -200,7 +200,7 @@ export interface BaseRollupInputs { new_contracts_subtree_sibling_path: FixedLengthArray; new_public_data_update_requests_sibling_paths: FixedLengthArray, 32>; new_public_data_reads_sibling_paths: FixedLengthArray, 32>; - blocks_tree_root_membership_witnesses: FixedLengthArray; + archive_root_membership_witnesses: FixedLengthArray; constants: ConstantRollupData; } diff --git a/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts b/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts index 233624ab34d..e6a4f760ab7 100644 --- a/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/rollup_merge_types.ts @@ -22,7 +22,7 @@ export interface GlobalVariables { } export interface ConstantRollupData { - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; private_kernel_vk_tree_root: Field; public_kernel_vk_tree_root: Field; base_rollup_vk_hash: Field; diff --git a/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts b/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts index 63c4b01df51..b1ad54abde9 100644 --- a/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts +++ b/yarn-project/noir-protocol-circuits/src/types/rollup_root_types.ts @@ -22,7 +22,7 @@ export interface GlobalVariables { } export interface ConstantRollupData { - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + archive_snapshot: AppendOnlyTreeSnapshot; private_kernel_vk_tree_root: Field; public_kernel_vk_tree_root: Field; base_rollup_vk_hash: Field; @@ -68,8 +68,8 @@ export interface RootRollupInputs { new_l1_to_l2_messages: FixedLengthArray; new_l1_to_l2_messages_tree_root_sibling_path: FixedLengthArray; start_l1_to_l2_messages_tree_snapshot: AppendOnlyTreeSnapshot; - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; - new_blocks_tree_sibling_path: FixedLengthArray; + start_archive_snapshot: AppendOnlyTreeSnapshot; + new_archive_sibling_path: FixedLengthArray; } export interface RootRollupPublicInputs { @@ -83,16 +83,10 @@ export interface RootRollupPublicInputs { end_contract_tree_snapshot: AppendOnlyTreeSnapshot; start_public_data_tree_root: Field; end_public_data_tree_root: Field; - start_tree_of_historical_note_hash_tree_roots_snapshot: AppendOnlyTreeSnapshot; - end_tree_of_historical_note_hash_tree_roots_snapshot: AppendOnlyTreeSnapshot; - start_tree_of_historical_contract_tree_roots_snapshot: AppendOnlyTreeSnapshot; - end_tree_of_historical_contract_tree_roots_snapshot: AppendOnlyTreeSnapshot; start_l1_to_l2_messages_tree_snapshot: AppendOnlyTreeSnapshot; end_l1_to_l2_messages_tree_snapshot: AppendOnlyTreeSnapshot; - start_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot: AppendOnlyTreeSnapshot; - end_tree_of_historical_l1_to_l2_messages_tree_roots_snapshot: AppendOnlyTreeSnapshot; - start_blocks_tree_snapshot: AppendOnlyTreeSnapshot; - end_blocks_tree_snapshot: AppendOnlyTreeSnapshot; + start_archive_snapshot: AppendOnlyTreeSnapshot; + end_archive_snapshot: AppendOnlyTreeSnapshot; calldata_hash: FixedLengthArray; l1_to_l2_messages_hash: FixedLengthArray; } diff --git a/yarn-project/p2p-bootstrap/terraform/main.tf b/yarn-project/p2p-bootstrap/terraform/main.tf index 43925bd5b29..cfa63547503 100644 --- a/yarn-project/p2p-bootstrap/terraform/main.tf +++ 
b/yarn-project/p2p-bootstrap/terraform/main.tf @@ -224,27 +224,18 @@ resource "aws_security_group_rule" "allow-bootstrap-tcp" { security_group_id = data.terraform_remote_state.aztec-network_iac.outputs.p2p_security_group_id } -## Commented out here and setup manually as terraform (or the aws provider version we are using) has a bug -## NLB listeners can't have a 'weight' property defined. You will see there isn't one here but that doesn't -## stop it trying to automatically specify one and giving an error - -# resource "aws_lb_listener" "aztec-bootstrap-tcp-listener" { -# count = local.bootnode_count -# load_balancer_arn = data.terraform_remote_state.aztec-network_iac.outputs.nlb_arn -# port = var.BOOTNODE_LISTEN_PORT + count.index -# protocol = "TCP" - -# tags = { -# name = "aztec-bootstrap-${count.index}-target-group" -# } - -# default_action { -# type = "forward" - -# forward { -# target_group { -# arn = aws_lb_target_group.aztec-bootstrap-target-group[count.index].arn -# } -# } -# } -# } +resource "aws_lb_listener" "aztec-bootstrap-tcp-listener" { + count = local.bootnode_count + load_balancer_arn = data.terraform_remote_state.aztec-network_iac.outputs.nlb_arn + port = var.BOOTNODE_LISTEN_PORT + count.index + protocol = "TCP" + + tags = { + name = "aztec-bootstrap-${count.index}-target-group" + } + + default_action { + type = "forward" + target_group_arn = aws_lb_target_group.aztec-bootstrap-target-group[count.index].arn + } +} diff --git a/yarn-project/package.json b/yarn-project/package.json index c123e44e1b3..089ce942376 100644 --- a/yarn-project/package.json +++ b/yarn-project/package.json @@ -64,7 +64,7 @@ "resolutions": { "ts-jest@^29.1.0": "patch:ts-jest@npm%3A29.1.1#./.yarn/patches/ts-jest-npm-29.1.1-04e888e48e.patch", "ts-jest@^29.1.1": "patch:ts-jest@npm%3A29.1.1#./.yarn/patches/ts-jest-npm-29.1.1-04e888e48e.patch", - "@aztec/bb.js": "portal:../barretenberg/ts/package", + "@aztec/bb.js": "portal:../barretenberg/ts", "@noir-lang/acvm_js": "portal:../noir/packages/acvm_js", "@noir-lang/backend_barretenberg": "portal:../noir/packages/backend_barretenberg", "@noir-lang/types": "portal:../noir/packages/types", diff --git a/yarn-project/pxe/src/bin/index.ts b/yarn-project/pxe/src/bin/index.ts index de9e3684ed2..43e49dd8271 100644 --- a/yarn-project/pxe/src/bin/index.ts +++ b/yarn-project/pxe/src/bin/index.ts @@ -1,4 +1,5 @@ #!/usr/bin/env -S node --no-warnings +import { init } from '@aztec/foundation/crypto'; import { createDebugLogger } from '@aztec/foundation/log'; import { createAztecNodeClient } from '@aztec/types'; @@ -16,6 +17,8 @@ const logger = createDebugLogger('aztec:pxe_service'); async function main() { logger.info(`Setting up PXE...`); + await init(); + const pxeConfig = getPXEServiceConfig(); const nodeRpcClient = createAztecNodeClient(AZTEC_NODE_URL); const pxeService = await createPXEService(nodeRpcClient, pxeConfig); diff --git a/yarn-project/pxe/src/database/memory_db.ts b/yarn-project/pxe/src/database/memory_db.ts index b0562f0ceb6..4f2849b42e5 100644 --- a/yarn-project/pxe/src/database/memory_db.ts +++ b/yarn-project/pxe/src/database/memory_db.ts @@ -128,7 +128,7 @@ export class MemoryDB extends MemoryContractDatabase implements Database { roots[MerkleTreeId.NULLIFIER_TREE], roots[MerkleTreeId.CONTRACT_TREE], roots[MerkleTreeId.L1_TO_L2_MESSAGES_TREE], - roots[MerkleTreeId.BLOCKS_TREE], + roots[MerkleTreeId.ARCHIVE], Fr.ZERO, // todo: private kernel vk tree root roots[MerkleTreeId.PUBLIC_DATA_TREE], this.globalVariablesHash, @@ -142,7 +142,7 @@ export class 
MemoryDB extends MemoryContractDatabase implements Database { [MerkleTreeId.NULLIFIER_TREE]: blockHeader.nullifierTreeRoot, [MerkleTreeId.CONTRACT_TREE]: blockHeader.contractTreeRoot, [MerkleTreeId.L1_TO_L2_MESSAGES_TREE]: blockHeader.l1ToL2MessagesTreeRoot, - [MerkleTreeId.BLOCKS_TREE]: blockHeader.blocksTreeRoot, + [MerkleTreeId.ARCHIVE]: blockHeader.archiveRoot, [MerkleTreeId.PUBLIC_DATA_TREE]: blockHeader.publicDataTreeRoot, }); } diff --git a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts index 665fd47b648..0cde8a8b396 100644 --- a/yarn-project/pxe/src/pxe_http/pxe_http_server.ts +++ b/yarn-project/pxe/src/pxe_http/pxe_http_server.ts @@ -1,5 +1,6 @@ import { FunctionSelector } from '@aztec/circuits.js'; import { AztecAddress } from '@aztec/foundation/aztec-address'; +import { EthAddress } from '@aztec/foundation/eth-address'; import { Fr, GrumpkinScalar, Point } from '@aztec/foundation/fields'; import { JsonRpcServer } from '@aztec/foundation/json-rpc/server'; import { @@ -24,8 +25,6 @@ import { import http from 'http'; import { foundry } from 'viem/chains'; -import { EthAddress } from '../index.js'; - export const localAnvil = foundry; /** diff --git a/yarn-project/pxe/src/simulator_oracle/index.ts b/yarn-project/pxe/src/simulator_oracle/index.ts index 1bb9a289c74..7e495c173ea 100644 --- a/yarn-project/pxe/src/simulator_oracle/index.ts +++ b/yarn-project/pxe/src/simulator_oracle/index.ts @@ -147,8 +147,8 @@ export class SimulatorOracle implements DBOracle { return (await this.stateInfoProvider.getNullifierTreeSiblingPath(blockNumber, leafIndex)).toFieldArray(); case MerkleTreeId.NOTE_HASH_TREE: return (await this.stateInfoProvider.getNoteHashSiblingPath(blockNumber, leafIndex)).toFieldArray(); - case MerkleTreeId.BLOCKS_TREE: - return (await this.stateInfoProvider.getBlocksTreeSiblingPath(blockNumber, leafIndex)).toFieldArray(); + case MerkleTreeId.ARCHIVE: + return (await this.stateInfoProvider.getArchiveSiblingPath(blockNumber, leafIndex)).toFieldArray(); case MerkleTreeId.PUBLIC_DATA_TREE: return (await this.stateInfoProvider.getPublicDataTreeSiblingPath(blockNumber, leafIndex)).toFieldArray(); default: diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts index 6ee3071712c..95893f10810 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.test.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.test.ts @@ -24,7 +24,7 @@ describe('Synchronizer', () => { [MerkleTreeId.NULLIFIER_TREE]: blockHeader.nullifierTreeRoot, [MerkleTreeId.PUBLIC_DATA_TREE]: blockHeader.publicDataTreeRoot, [MerkleTreeId.L1_TO_L2_MESSAGES_TREE]: blockHeader.l1ToL2MessagesTreeRoot, - [MerkleTreeId.BLOCKS_TREE]: blockHeader.blocksTreeRoot, + [MerkleTreeId.ARCHIVE]: blockHeader.archiveRoot, }; aztecNode = mock(); diff --git a/yarn-project/pxe/src/synchronizer/synchronizer.ts b/yarn-project/pxe/src/synchronizer/synchronizer.ts index 98dfd30c7e6..1c557386c18 100644 --- a/yarn-project/pxe/src/synchronizer/synchronizer.ts +++ b/yarn-project/pxe/src/synchronizer/synchronizer.ts @@ -206,7 +206,7 @@ export class Synchronizer { block.endNullifierTreeSnapshot.root, block.endContractTreeSnapshot.root, block.endL1ToL2MessagesTreeSnapshot.root, - block.endBlocksTreeSnapshot.root, + block.endArchiveSnapshot.root, Fr.ZERO, // todo: private kernel vk tree root block.endPublicDataTreeRoot, globalsHash, diff --git a/yarn-project/scripts/version_packages.sh b/yarn-project/scripts/version_packages.sh 
new file mode 100755 index 00000000000..a708cb7a4ed --- /dev/null +++ b/yarn-project/scripts/version_packages.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -eu + +if [ -n "$COMMIT_TAG" ]; then + for workspace in $(yarn workspaces list --json | jq -r '.location'); do + (cd $workspace && jq --arg v $COMMIT_TAG '.version = $v' package.json > _temp && mv _temp package.json) + done +fi \ No newline at end of file diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts index 808e02f6cae..5909afce8b3 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.test.ts @@ -143,7 +143,7 @@ describe('sequencer/solo_block_builder', () => { await expectsDb.appendLeaves(MerkleTreeId.L1_TO_L2_MESSAGES_TREE, asBuffer); }; - const updateBlocksTree = async () => { + const updateArchive = async () => { const blockHash = computeBlockHashWithGlobals( globalVariables, rootRollupOutput.endNoteHashTreeSnapshot.root, @@ -152,7 +152,7 @@ describe('sequencer/solo_block_builder', () => { rootRollupOutput.endL1ToL2MessagesTreeSnapshot.root, rootRollupOutput.endPublicDataTreeRoot, ); - await expectsDb.appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await expectsDb.appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); }; const getTreeSnapshot = async (tree: MerkleTreeId) => { @@ -204,8 +204,8 @@ describe('sequencer/solo_block_builder', () => { // Calculate block hash rootRollupOutput.globalVariables = globalVariables; - await updateBlocksTree(); - rootRollupOutput.endBlocksTreeSnapshot = await getTreeSnapshot(MerkleTreeId.BLOCKS_TREE); + await updateArchive(); + rootRollupOutput.endArchiveSnapshot = await getTreeSnapshot(MerkleTreeId.ARCHIVE); const txs = [...txsLeft, ...txsRight]; @@ -235,8 +235,8 @@ describe('sequencer/solo_block_builder', () => { endPublicDataTreeRoot: rootRollupOutput.endPublicDataTreeRoot, startL1ToL2MessagesTreeSnapshot: rootRollupOutput.startL1ToL2MessagesTreeSnapshot, endL1ToL2MessagesTreeSnapshot: rootRollupOutput.endL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot: rootRollupOutput.startBlocksTreeSnapshot, - endBlocksTreeSnapshot: rootRollupOutput.endBlocksTreeSnapshot, + startArchiveSnapshot: rootRollupOutput.startArchiveSnapshot, + endArchiveSnapshot: rootRollupOutput.endArchiveSnapshot, newCommitments, newNullifiers, newContracts, diff --git a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts index 06976b2f209..7b19203826f 100644 --- a/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts +++ b/yarn-project/sequencer-client/src/block_builder/solo_block_builder.ts @@ -1,6 +1,6 @@ import { + ARCHIVE_HEIGHT, AppendOnlyTreeSnapshot, - BLOCKS_TREE_HEIGHT, BaseOrMergeRollupPublicInputs, BaseRollupInputs, CONTRACT_SUBTREE_HEIGHT, @@ -58,11 +58,9 @@ import { RollupProver } from '../prover/index.js'; import { ProcessedTx } from '../sequencer/processed_tx.js'; import { RollupSimulator } from '../simulator/index.js'; import { BlockBuilder } from './index.js'; -import { AllowedTreeNames, OutputWithTreeSnapshot } from './types.js'; +import { AllowedTreeNames, OutputWithTreeSnapshot, TreeNames } from './types.js'; const frToBigInt = (fr: Fr) => toBigIntBE(fr.toBuffer()); -const bigintToFr = (num: bigint) => new Fr(num); -const bigintToNum = (num: bigint) => Number(num); // 
Denotes fields that are not used now, but will be in the future const FUTURE_FR = new Fr(0n); @@ -103,7 +101,7 @@ export class SoloBlockBuilder implements BlockBuilder { startContractTreeSnapshot, startPublicDataTreeSnapshot, startL1ToL2MessageTreeSnapshot, - startBlocksTreeSnapshot, + startArchiveSnapshot, ] = await Promise.all( [ MerkleTreeId.NOTE_HASH_TREE, @@ -111,7 +109,7 @@ export class SoloBlockBuilder implements BlockBuilder { MerkleTreeId.CONTRACT_TREE, MerkleTreeId.PUBLIC_DATA_TREE, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, - MerkleTreeId.BLOCKS_TREE, + MerkleTreeId.ARCHIVE, ].map(tree => this.getTreeSnapshot(tree)), ); @@ -127,7 +125,7 @@ export class SoloBlockBuilder implements BlockBuilder { endContractTreeSnapshot, endPublicDataTreeRoot, endL1ToL2MessagesTreeSnapshot, - endBlocksTreeSnapshot, + endArchiveSnapshot, } = circuitsOutput; // Collect all new nullifiers, commitments, and contracts from all txs in this block @@ -167,8 +165,8 @@ export class SoloBlockBuilder implements BlockBuilder { endPublicDataTreeRoot, startL1ToL2MessagesTreeSnapshot: startL1ToL2MessageTreeSnapshot, endL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot, - endBlocksTreeSnapshot, + startArchiveSnapshot, + endArchiveSnapshot, newCommitments, newNullifiers, newL2ToL1Msgs, @@ -313,17 +311,17 @@ export class SoloBlockBuilder implements BlockBuilder { this.debug(`Updating and validating root trees`); const globalVariablesHash = computeGlobalsHash(left[0].constants.globalVariables); await this.db.updateLatestGlobalVariablesHash(globalVariablesHash); - await this.db.updateBlocksTree(globalVariablesHash); + await this.db.updateArchive(globalVariablesHash); await this.validateRootOutput(rootOutput); return [rootOutput, rootProof]; } - async updateBlocksTree(globalVariables: GlobalVariables) { + async updateArchive(globalVariables: GlobalVariables) { // Calculate the block hash and add it to the historical block hashes tree const blockHash = await this.calculateBlockHash(globalVariables); - await this.db.appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await this.db.appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); } protected async calculateBlockHash(globals: GlobalVariables) { @@ -353,9 +351,9 @@ export class SoloBlockBuilder implements BlockBuilder { // Validate that the new roots we calculated from manual insertions match the outputs of the simulation protected async validateTrees(rollupOutput: BaseOrMergeRollupPublicInputs | RootRollupPublicInputs) { await Promise.all([ - this.validateTree(rollupOutput, MerkleTreeId.CONTRACT_TREE, 'Contract'), - this.validateTree(rollupOutput, MerkleTreeId.NOTE_HASH_TREE, 'NoteHash'), - this.validateTree(rollupOutput, MerkleTreeId.NULLIFIER_TREE, 'Nullifier'), + this.validateTree(rollupOutput, MerkleTreeId.CONTRACT_TREE, 'ContractTree'), + this.validateTree(rollupOutput, MerkleTreeId.NOTE_HASH_TREE, 'NoteHashTree'), + this.validateTree(rollupOutput, MerkleTreeId.NULLIFIER_TREE, 'NullifierTree'), this.validatePublicDataTreeRoot(rollupOutput), ]); } @@ -364,22 +362,11 @@ export class SoloBlockBuilder implements BlockBuilder { protected async validateRootOutput(rootOutput: RootRollupPublicInputs) { await Promise.all([ this.validateTrees(rootOutput), - this.validateTree(rootOutput, MerkleTreeId.BLOCKS_TREE, 'Blocks'), - this.validateTree(rootOutput, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, 'L1ToL2Messages'), + this.validateTree(rootOutput, MerkleTreeId.ARCHIVE, 'Archive'), + this.validateTree(rootOutput, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, 
'L1ToL2MessagesTree'), ]); } - // Helper for validating a roots tree against a circuit simulation output - protected async validateRootTree( - rootOutput: RootRollupPublicInputs, - treeId: MerkleTreeId, - name: 'Contract' | 'NoteHash' | 'L1ToL2Messages', - ) { - const localTree = await this.getTreeSnapshot(treeId); - const simulatedTree = rootOutput[`endTreeOfHistorical${name}TreeRootsSnapshot`]; - this.validateSimulatedTree(localTree, simulatedTree, name, `Roots ${name}`); - } - /** * Validates that the root of the public data tree matches the output of the circuit simulation. * @param output - The output of the circuit simulation. @@ -406,7 +393,7 @@ export class SoloBlockBuilder implements BlockBuilder { } const localTree = await this.getTreeSnapshot(treeId); - const simulatedTree = (output as OutputWithTreeSnapshot)[`end${name}TreeSnapshot`]; + const simulatedTree = (output as OutputWithTreeSnapshot)[`end${name}Snapshot`]; this.validateSimulatedTree(localTree, simulatedTree, name); } @@ -414,7 +401,7 @@ export class SoloBlockBuilder implements BlockBuilder { protected validateSimulatedTree( localTree: AppendOnlyTreeSnapshot, simulatedTree: AppendOnlyTreeSnapshot, - name: 'NoteHash' | 'Contract' | 'Nullifier' | 'L1ToL2Messages' | 'Blocks', + name: TreeNames, label?: string, ) { if (!simulatedTree.root.toBuffer().equals(localTree.root.toBuffer())) { @@ -467,12 +454,12 @@ export class SoloBlockBuilder implements BlockBuilder { const startL1ToL2MessagesTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE); // Get blocks tree - const startBlocksTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.BLOCKS_TREE); - const newBlocksTreeSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.BLOCKS_TREE); + const startArchiveSnapshot = await this.getTreeSnapshot(MerkleTreeId.ARCHIVE); + const newArchiveSiblingPathArray = await getRootTreeSiblingPath(MerkleTreeId.ARCHIVE); - const newBlocksTreeSiblingPath = makeTuple( - BLOCKS_TREE_HEIGHT, - i => (i < newBlocksTreeSiblingPathArray.length ? newBlocksTreeSiblingPathArray[i] : Fr.ZERO), + const newArchiveSiblingPath = makeTuple( + ARCHIVE_HEIGHT, + i => (i < newArchiveSiblingPathArray.length ? 
newArchiveSiblingPathArray[i] : Fr.ZERO), 0, ); @@ -481,8 +468,8 @@ export class SoloBlockBuilder implements BlockBuilder { newL1ToL2Messages, newL1ToL2MessagesTreeRootSiblingPath, startL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot, - newBlocksTreeSiblingPath, + startArchiveSnapshot, + newArchiveSiblingPath, }); } @@ -551,7 +538,7 @@ export class SoloBlockBuilder implements BlockBuilder { l1ToL2MessagesTreeRoot, publicDataTreeRoot, ); - return this.getMembershipWitnessFor(blockHash, MerkleTreeId.BLOCKS_TREE, BLOCKS_TREE_HEIGHT); + return this.getMembershipWitnessFor(blockHash, MerkleTreeId.ARCHIVE, ARCHIVE_HEIGHT); } protected async getConstantRollupData(globalVariables: GlobalVariables): Promise { @@ -560,7 +547,7 @@ export class SoloBlockBuilder implements BlockBuilder { mergeRollupVkHash: DELETE_FR, privateKernelVkTreeRoot: FUTURE_FR, publicKernelVkTreeRoot: FUTURE_FR, - startBlocksTreeSnapshot: await this.getTreeSnapshot(MerkleTreeId.BLOCKS_TREE), + archiveSnapshot: await this.getTreeSnapshot(MerkleTreeId.ARCHIVE), globalVariables, }); } @@ -577,19 +564,16 @@ export class SoloBlockBuilder implements BlockBuilder { const tree = MerkleTreeId.NULLIFIER_TREE; const prevValueIndex = await this.db.getPreviousValueIndex(tree, frToBigInt(nullifier)); - const prevValueInfo = await this.db.getLeafData(tree, prevValueIndex.index); - if (!prevValueInfo) { + if (!prevValueIndex) { throw new Error(`Nullifier tree should have one initial leaf`); } + const prevValuePreimage = (await this.db.getLeafPreimage(tree, prevValueIndex.index))!; + const prevValueSiblingPath = await this.db.getSiblingPath(tree, BigInt(prevValueIndex.index)); return { index: prevValueIndex, - leafPreimage: new NullifierLeafPreimage( - bigintToFr(prevValueInfo.value), - bigintToFr(prevValueInfo.nextValue), - bigintToNum(prevValueInfo.nextIndex), - ), + leafPreimage: prevValuePreimage, witness: new MembershipWitness( NULLIFIER_TREE_HEIGHT, BigInt(prevValueIndex.index), @@ -651,7 +635,7 @@ export class SoloBlockBuilder implements BlockBuilder { const startContractTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.CONTRACT_TREE); const startNoteHashTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE); const startPublicDataTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE); - const startBlocksTreeSnapshot = await this.getTreeSnapshot(MerkleTreeId.BLOCKS_TREE); + const startArchiveSnapshot = await this.getTreeSnapshot(MerkleTreeId.ARCHIVE); // Get the subtree sibling paths for the circuit const newCommitmentsSubtreeSiblingPathArray = await this.getSubtreeSiblingPath( @@ -736,7 +720,7 @@ export class SoloBlockBuilder implements BlockBuilder { startContractTreeSnapshot, startNoteHashTreeSnapshot, startPublicDataTreeRoot: startPublicDataTreeSnapshot.root, - startBlocksTreeSnapshot, + archiveSnapshot: startArchiveSnapshot, sortedNewNullifiers: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => Fr.fromBuffer(sortedNewNullifiers[i])), sortednewNullifiersIndexes: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => sortednewNullifiersIndexes[i]), newCommitmentsSubtreeSiblingPath, @@ -748,12 +732,8 @@ export class SoloBlockBuilder implements BlockBuilder { newPublicDataReadsSiblingPaths, lowNullifierLeafPreimages: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => i < nullifierWitnessLeaves.length - ? 
new NullifierLeafPreimage( - new Fr(nullifierWitnessLeaves[i].leafData.value), - new Fr(nullifierWitnessLeaves[i].leafData.nextValue), - Number(nullifierWitnessLeaves[i].leafData.nextIndex), - ) - : new NullifierLeafPreimage(Fr.ZERO, Fr.ZERO, 0), + ? (nullifierWitnessLeaves[i].leafPreimage as NullifierLeafPreimage) + : NullifierLeafPreimage.empty(), ), lowNullifierMembershipWitness: makeTuple(MAX_NEW_NULLIFIERS_PER_BASE_ROLLUP, i => i < lowNullifierMembershipWitnesses.length @@ -761,7 +741,7 @@ export class SoloBlockBuilder implements BlockBuilder { : this.makeEmptyMembershipWitness(NULLIFIER_TREE_HEIGHT), ), kernelData: [this.getKernelDataFor(left), this.getKernelDataFor(right)], - blocksTreeRootMembershipWitnesses: [ + archiveRootMembershipWitnesses: [ await this.getHistoricalTreesMembershipWitnessFor(left), await this.getHistoricalTreesMembershipWitnessFor(right), ], diff --git a/yarn-project/sequencer-client/src/block_builder/types.ts b/yarn-project/sequencer-client/src/block_builder/types.ts index b39f0a90a17..8a645ca8c56 100644 --- a/yarn-project/sequencer-client/src/block_builder/types.ts +++ b/yarn-project/sequencer-client/src/block_builder/types.ts @@ -1,16 +1,23 @@ import { AppendOnlyTreeSnapshot, BaseOrMergeRollupPublicInputs, RootRollupPublicInputs } from '@aztec/circuits.js'; +/** + * Type representing the names of the trees for the base rollup. + */ +type BaseTreeNames = 'NoteHashTree' | 'ContractTree' | 'NullifierTree'; +/** + * Type representing the names of the trees. + */ +export type TreeNames = BaseTreeNames | 'L1ToL2MessagesTree' | 'Archive'; + /** * Type to assert that only the correct trees are checked when validating rollup tree outputs. */ export type AllowedTreeNames = - T extends RootRollupPublicInputs - ? 'NoteHash' | 'Contract' | 'Nullifier' | 'L1ToL2Messages' | 'Blocks' - : 'NoteHash' | 'Contract' | 'Nullifier'; + T extends RootRollupPublicInputs ? TreeNames : BaseTreeNames; /** * Type to assert the correct object field is indexed when validating rollup tree outputs. 
*/ export type OutputWithTreeSnapshot = { - [K in `end${AllowedTreeNames}TreeSnapshot`]: AppendOnlyTreeSnapshot; + [K in `end${AllowedTreeNames}Snapshot`]: AppendOnlyTreeSnapshot; }; diff --git a/yarn-project/sequencer-client/src/sequencer/utils.ts b/yarn-project/sequencer-client/src/sequencer/utils.ts index 62cde96601b..d315146837f 100644 --- a/yarn-project/sequencer-client/src/sequencer/utils.ts +++ b/yarn-project/sequencer-client/src/sequencer/utils.ts @@ -17,7 +17,7 @@ export async function getBlockHeader( Fr.fromBuffer(roots.nullifierTreeRoot), Fr.fromBuffer(roots.contractDataTreeRoot), Fr.fromBuffer(roots.l1Tol2MessagesTreeRoot), - Fr.fromBuffer(roots.blocksTreeRoot), + Fr.fromBuffer(roots.archiveRoot), Fr.ZERO, Fr.fromBuffer(roots.publicDataTreeRoot), prevGlobalsHash, diff --git a/yarn-project/types/package.json b/yarn-project/types/package.json index 17e999a421f..5c472ea84dc 100644 --- a/yarn-project/types/package.json +++ b/yarn-project/types/package.json @@ -5,7 +5,10 @@ "exports": { ".": "./dest/index.js", "./stats": "./dest/stats/index.js", - "./jest": "./dest/jest/index.js" + "./jest": "./dest/jest/index.js", + "./interfaces": "./dest/interfaces/index.js", + "./log_id": "./dest/logs/log_id.js", + "./tx_hash": "./dest/tx/tx_hash.js" }, "typedocOptions": { "entryPoints": [ diff --git a/yarn-project/types/src/index.ts b/yarn-project/types/src/index.ts index 3ffc8125103..149ec5a7a6a 100644 --- a/yarn-project/types/src/index.ts +++ b/yarn-project/types/src/index.ts @@ -23,5 +23,4 @@ export * from './interfaces/index.js'; export * from './sibling_path.js'; export * from './auth_witness.js'; export * from './aztec_node/rpc/index.js'; -export * from '@aztec/circuits.js/types'; -export { CompleteAddress } from '@aztec/circuits.js'; +export { CompleteAddress, PublicKey, PartialAddress, GrumpkinPrivateKey } from '@aztec/circuits.js'; diff --git a/yarn-project/types/src/interfaces/deployed-contract.ts b/yarn-project/types/src/interfaces/deployed-contract.ts index 1e5fd058197..784b162ef67 100644 --- a/yarn-project/types/src/interfaces/deployed-contract.ts +++ b/yarn-project/types/src/interfaces/deployed-contract.ts @@ -1,6 +1,6 @@ -import { EthAddress } from '@aztec/circuits.js'; +import { CompleteAddress } from '@aztec/circuits.js'; import { ContractArtifact } from '@aztec/foundation/abi'; -import { CompleteAddress } from '@aztec/types'; +import { EthAddress } from '@aztec/foundation/eth-address'; /** * Represents a deployed contract on the Aztec network. diff --git a/yarn-project/types/src/interfaces/index.ts b/yarn-project/types/src/interfaces/index.ts index f351287cab2..44ed98bbed4 100644 --- a/yarn-project/types/src/interfaces/index.ts +++ b/yarn-project/types/src/interfaces/index.ts @@ -1,4 +1,4 @@ -export * from './state_provider.js'; +export * from './state_info_provider.js'; export * from './hasher.js'; export * from './aztec-node.js'; export * from './pxe.js'; @@ -6,5 +6,4 @@ export * from './deployed-contract.js'; export * from './node-info.js'; export * from './sync-status.js'; export * from './configs.js'; -export * from './leaf_data.js'; -export * from './nullifier_witness.js'; +export * from './nullifier_tree.js'; diff --git a/yarn-project/types/src/interfaces/leaf_data.ts b/yarn-project/types/src/interfaces/leaf_data.ts deleted file mode 100644 index 2edc8e09818..00000000000 --- a/yarn-project/types/src/interfaces/leaf_data.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** - * A leaf of a tree. - */ -export interface LeafData { - /** - * A value of the leaf. 
- */ - value: bigint; - /** - * An index of the next leaf. - */ - nextIndex: bigint; - /** - * A value of the next leaf. - */ - nextValue: bigint; -} diff --git a/yarn-project/types/src/interfaces/nullifier_witness.ts b/yarn-project/types/src/interfaces/nullifier_tree.ts similarity index 79% rename from yarn-project/types/src/interfaces/nullifier_witness.ts rename to yarn-project/types/src/interfaces/nullifier_tree.ts index 90dc6d9a1c7..14fdf426b8d 100644 --- a/yarn-project/types/src/interfaces/nullifier_witness.ts +++ b/yarn-project/types/src/interfaces/nullifier_tree.ts @@ -1,7 +1,6 @@ -import { Fr, NULLIFIER_TREE_HEIGHT } from '@aztec/circuits.js'; +import { Fr, NULLIFIER_TREE_HEIGHT, NullifierLeafPreimage } from '@aztec/circuits.js'; import { SiblingPath } from '../sibling_path.js'; -import { LeafData } from './leaf_data.js'; /** * Nullifier membership witness. @@ -18,7 +17,7 @@ export class NullifierMembershipWitness { /** * Preimage of the nullifier. */ - public readonly leafData: LeafData, + public readonly leafPreimage: NullifierLeafPreimage, /** * Sibling path to prove membership of the nullifier. */ @@ -32,9 +31,9 @@ export class NullifierMembershipWitness { public toFieldArray(): Fr[] { return [ new Fr(this.index), - new Fr(this.leafData.value), - new Fr(this.leafData.nextIndex), - new Fr(this.leafData.nextValue), + new Fr(this.leafPreimage.nullifier), + new Fr(this.leafPreimage.nextIndex), + new Fr(this.leafPreimage.nextNullifier), ...this.siblingPath.toFieldArray(), ]; } diff --git a/yarn-project/types/src/interfaces/pxe.ts b/yarn-project/types/src/interfaces/pxe.ts index 1a2e52c1262..a78cf957bbd 100644 --- a/yarn-project/types/src/interfaces/pxe.ts +++ b/yarn-project/types/src/interfaces/pxe.ts @@ -1,7 +1,6 @@ -import { AztecAddress, Fr, GrumpkinPrivateKey, PartialAddress } from '@aztec/circuits.js'; +import { AztecAddress, CompleteAddress, Fr, GrumpkinPrivateKey, PartialAddress } from '@aztec/circuits.js'; import { AuthWitness, - CompleteAddress, ContractData, ExtendedContractData, ExtendedNote, diff --git a/yarn-project/types/src/interfaces/state_provider.ts b/yarn-project/types/src/interfaces/state_info_provider.ts similarity index 96% rename from yarn-project/types/src/interfaces/state_provider.ts rename to yarn-project/types/src/interfaces/state_info_provider.ts index cec3f6fed55..7818f984dab 100644 --- a/yarn-project/types/src/interfaces/state_provider.ts +++ b/yarn-project/types/src/interfaces/state_info_provider.ts @@ -1,5 +1,5 @@ import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, CONTRACT_TREE_HEIGHT, Fr, L1_TO_L2_MSG_TREE_HEIGHT, @@ -12,7 +12,7 @@ import { L1ToL2MessageAndIndex } from '../l1_to_l2_message.js'; import { L2Block } from '../l2_block.js'; import { MerkleTreeId } from '../merkle_tree_id.js'; import { SiblingPath } from '../sibling_path.js'; -import { NullifierMembershipWitness } from './nullifier_witness.js'; +import { NullifierMembershipWitness } from './nullifier_tree.js'; /** Helper type for a specific L2 block number or the latest block number */ type BlockNumber = number | 'latest'; @@ -93,10 +93,7 @@ export interface StateInfoProvider { * @returns The sibling path. * TODO: https://github.com/AztecProtocol/aztec-packages/issues/3414 */ - getBlocksTreeSiblingPath( - blockNumber: BlockNumber, - leafIndex: bigint, - ): Promise>; + getArchiveSiblingPath(blockNumber: BlockNumber, leafIndex: bigint): Promise>; /** * Returns a sibling path for a leaf in the committed public data tree. 
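The nullifier_tree.ts change above swaps LeafData for NullifierLeafPreimage without disturbing the serialized witness layout: toFieldArray() still emits the leaf index, then the three preimage fields, then the sibling path. Because only the accessor names changed, downstream consumers of the field array are unaffected. A minimal TypeScript sketch of such a consumer, using only the Fr export already imported in this diff; DecodedNullifierWitness and decodeNullifierWitnessFields are hypothetical names for illustration, not part of the change:

import { Fr } from '@aztec/circuits.js';

// Mirrors the layout produced by NullifierMembershipWitness.toFieldArray():
// [index, nullifier, nextIndex, nextNullifier, ...siblingPath].
interface DecodedNullifierWitness {
  index: Fr;
  nullifier: Fr;
  nextIndex: Fr;
  nextNullifier: Fr;
  siblingPath: Fr[]; // NULLIFIER_TREE_HEIGHT entries
}

function decodeNullifierWitnessFields(fields: Fr[]): DecodedNullifierWitness {
  // Destructure in the exact order toFieldArray() pushes the fields.
  const [index, nullifier, nextIndex, nextNullifier, ...siblingPath] = fields;
  return { index, nullifier, nextIndex, nextNullifier, siblingPath };
}
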
diff --git a/yarn-project/types/src/l2_block.ts b/yarn-project/types/src/l2_block.ts index 67832ef2da7..2918ba99bd3 100644 --- a/yarn-project/types/src/l2_block.ts +++ b/yarn-project/types/src/l2_block.ts @@ -86,9 +86,9 @@ export class L2Block { */ public startL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot, /** - * The tree snapshot of the blocks tree at the start of the rollup. + * The tree snapshot of the archive at the start of the rollup. */ - public startBlocksTreeSnapshot: AppendOnlyTreeSnapshot = AppendOnlyTreeSnapshot.empty(), + public startArchiveSnapshot: AppendOnlyTreeSnapshot = AppendOnlyTreeSnapshot.empty(), /** * The tree snapshot of the note hash tree at the end of the rollup. */ @@ -110,9 +110,9 @@ export class L2Block { */ public endL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot, /** - * The tree snapshot of the blocks tree at the end of the rollup. + * The tree snapshot of the archive at the end of the rollup. */ - public endBlocksTreeSnapshot: AppendOnlyTreeSnapshot, + public endArchiveSnapshot: AppendOnlyTreeSnapshot, /** * The commitments to be inserted into the note hash tree. */ @@ -216,13 +216,13 @@ export class L2Block { startContractTreeSnapshot: makeAppendOnlyTreeSnapshot(0), startPublicDataTreeRoot: Fr.random(), startL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(0), - startBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(0), + startArchiveSnapshot: makeAppendOnlyTreeSnapshot(0), endNoteHashTreeSnapshot: makeAppendOnlyTreeSnapshot(newCommitments.length), endNullifierTreeSnapshot: makeAppendOnlyTreeSnapshot(newNullifiers.length), endContractTreeSnapshot: makeAppendOnlyTreeSnapshot(newContracts.length), endPublicDataTreeRoot: Fr.random(), endL1ToL2MessagesTreeSnapshot: makeAppendOnlyTreeSnapshot(1), - endBlocksTreeSnapshot: makeAppendOnlyTreeSnapshot(1), + endArchiveSnapshot: makeAppendOnlyTreeSnapshot(1), newCommitments, newNullifiers, newContracts, @@ -277,9 +277,9 @@ export class L2Block { */ startL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot; /** - * The tree snapshot of the blocks tree at the start of the rollup. + * The tree snapshot of the archive at the start of the rollup. */ - startBlocksTreeSnapshot: AppendOnlyTreeSnapshot; + startArchiveSnapshot: AppendOnlyTreeSnapshot; /** * The tree snapshot of the note hash tree at the end of the rollup. */ @@ -301,9 +301,9 @@ export class L2Block { */ endL1ToL2MessagesTreeSnapshot: AppendOnlyTreeSnapshot; /** - * The tree snapshot of the blocks tree at the end of the rollup. + * The tree snapshot of the archive at the end of the rollup. */ - endBlocksTreeSnapshot: AppendOnlyTreeSnapshot; + endArchiveSnapshot: AppendOnlyTreeSnapshot; /** * The commitments to be inserted into the note hash tree. 
*/ @@ -352,13 +352,13 @@ export class L2Block { fields.startContractTreeSnapshot, fields.startPublicDataTreeRoot, fields.startL1ToL2MessagesTreeSnapshot, - fields.startBlocksTreeSnapshot, + fields.startArchiveSnapshot, fields.endNoteHashTreeSnapshot, fields.endNullifierTreeSnapshot, fields.endContractTreeSnapshot, fields.endPublicDataTreeRoot, fields.endL1ToL2MessagesTreeSnapshot, - fields.endBlocksTreeSnapshot, + fields.endArchiveSnapshot, fields.newCommitments, fields.newNullifiers, fields.newPublicDataWrites, @@ -387,13 +387,13 @@ export class L2Block { this.startContractTreeSnapshot, this.startPublicDataTreeRoot, this.startL1ToL2MessagesTreeSnapshot, - this.startBlocksTreeSnapshot, + this.startArchiveSnapshot, this.endNoteHashTreeSnapshot, this.endNullifierTreeSnapshot, this.endContractTreeSnapshot, this.endPublicDataTreeRoot, this.endL1ToL2MessagesTreeSnapshot, - this.endBlocksTreeSnapshot, + this.endArchiveSnapshot, this.newCommitments.length, this.newCommitments, this.newNullifiers.length, @@ -449,13 +449,13 @@ export class L2Block { const startContractTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const startPublicDataTreeRoot = reader.readObject(Fr); const startL1ToL2MessagesTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const startBlocksTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); + const startArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endNoteHashTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endNullifierTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endContractTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const endPublicDataTreeRoot = reader.readObject(Fr); const endL1ToL2MessagesTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); - const endBlocksTreeSnapshot = reader.readObject(AppendOnlyTreeSnapshot); + const endArchiveSnapshot = reader.readObject(AppendOnlyTreeSnapshot); const newCommitments = reader.readVector(Fr); const newNullifiers = reader.readVector(Fr); const newPublicDataWrites = reader.readVector(PublicDataWrite); @@ -473,13 +473,13 @@ export class L2Block { startContractTreeSnapshot, startPublicDataTreeRoot, startL1ToL2MessagesTreeSnapshot: startL1ToL2MessagesTreeSnapshot, - startBlocksTreeSnapshot, + startArchiveSnapshot, endNoteHashTreeSnapshot, endNullifierTreeSnapshot, endContractTreeSnapshot, endPublicDataTreeRoot, endL1ToL2MessagesTreeSnapshot, - endBlocksTreeSnapshot, + endArchiveSnapshot, newCommitments, newNullifiers, newPublicDataWrites, @@ -589,13 +589,13 @@ export class L2Block { this.startContractTreeSnapshot, this.startPublicDataTreeRoot, this.startL1ToL2MessagesTreeSnapshot, - this.startBlocksTreeSnapshot, + this.startArchiveSnapshot, this.endNoteHashTreeSnapshot, this.endNullifierTreeSnapshot, this.endContractTreeSnapshot, this.endPublicDataTreeRoot, this.endL1ToL2MessagesTreeSnapshot, - this.endBlocksTreeSnapshot, + this.endArchiveSnapshot, this.getCalldataHash(), this.getL1ToL2MessagesHash(), ); @@ -615,7 +615,7 @@ export class L2Block { this.startContractTreeSnapshot, this.startPublicDataTreeRoot, this.startL1ToL2MessagesTreeSnapshot, - this.startBlocksTreeSnapshot, + this.startArchiveSnapshot, ); return sha256(inputValue); } @@ -632,7 +632,7 @@ export class L2Block { this.endContractTreeSnapshot, this.endPublicDataTreeRoot, this.endL1ToL2MessagesTreeSnapshot, - this.endBlocksTreeSnapshot, + this.endArchiveSnapshot, ); return sha256(inputValue); } @@ -843,14 +843,14 @@ export class L2Block { `startContractTreeSnapshot: 
${inspectTreeSnapshot(this.startContractTreeSnapshot)}`, `startPublicDataTreeRoot: ${this.startPublicDataTreeRoot.toString()}`, `startL1ToL2MessagesTreeSnapshot: ${inspectTreeSnapshot(this.startL1ToL2MessagesTreeSnapshot)}`, - `startBlocksTreeSnapshot: ${inspectTreeSnapshot(this.startBlocksTreeSnapshot)}`, + `startArchiveSnapshot: ${inspectTreeSnapshot(this.startArchiveSnapshot)}`, `endNoteHashTreeSnapshot: ${inspectTreeSnapshot(this.endNoteHashTreeSnapshot)}`, `endNullifierTreeSnapshot: ${inspectTreeSnapshot(this.endNullifierTreeSnapshot)}`, `endContractTreeSnapshot: ${inspectTreeSnapshot(this.endContractTreeSnapshot)}`, `endPublicDataTreeRoot: ${this.endPublicDataTreeRoot.toString()}`, `endPublicDataTreeRoot: ${this.endPublicDataTreeRoot.toString()}`, `endL1ToL2MessagesTreeSnapshot: ${inspectTreeSnapshot(this.endL1ToL2MessagesTreeSnapshot)}`, - `endBlocksTreeSnapshot: ${inspectTreeSnapshot(this.endBlocksTreeSnapshot)}`, + `endArchiveSnapshot: ${inspectTreeSnapshot(this.endArchiveSnapshot)}`, `newCommitments: ${inspectFrArray(this.newCommitments)}`, `newNullifiers: ${inspectFrArray(this.newNullifiers)}`, `newPublicDataWrite: ${inspectPublicDataWriteArray(this.newPublicDataWrites)}`, diff --git a/yarn-project/types/src/merkle_tree_id.ts b/yarn-project/types/src/merkle_tree_id.ts index 55f63259227..fde02472627 100644 --- a/yarn-project/types/src/merkle_tree_id.ts +++ b/yarn-project/types/src/merkle_tree_id.ts @@ -7,7 +7,7 @@ export enum MerkleTreeId { NOTE_HASH_TREE = 2, PUBLIC_DATA_TREE = 3, L1_TO_L2_MESSAGES_TREE = 4, - BLOCKS_TREE = 5, + ARCHIVE = 5, } export const merkleTreeIds = () => { diff --git a/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts b/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts index 29ba293736d..9f3f6976936 100644 --- a/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts +++ b/yarn-project/world-state/src/merkle-tree/merkle_tree_operations_facade.ts @@ -1,6 +1,8 @@ +import { NullifierLeafPreimage } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { BatchInsertionResult } from '@aztec/merkle-tree'; -import { L2Block, LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { L2Block, MerkleTreeId, SiblingPath } from '@aztec/types'; import { CurrentTreeRoots, HandleL2BlockResult, MerkleTreeDb, MerkleTreeOperations, TreeInfo } from '../index.js'; @@ -59,16 +61,19 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { getPreviousValueIndex( treeId: MerkleTreeId.NULLIFIER_TREE, value: bigint, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }> { + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { return this.trees.getPreviousValueIndex(treeId, value, this.includeUncommitted); } @@ -79,7 +84,7 @@ export class MerkleTreeOperationsFacade implements MerkleTreeOperations { * @param index - The index to insert into. * @returns Empty promise. 
*/ - updateLeaf(treeId: MerkleTreeId.NULLIFIER_TREE, leaf: LeafData, index: bigint): Promise { + updateLeaf(treeId: MerkleTreeId.NULLIFIER_TREE, leaf: NullifierLeafPreimage, index: bigint): Promise { return this.trees.updateLeaf(treeId, leaf, index); } @@ -87,10 +92,14 @@ * Gets the leaf data at a given index and tree. * @param treeId - The ID of the tree get the leaf from. * @param index - The index of the leaf to get. - * @returns Leaf data. + * @returns Leaf preimage. */ - getLeafData(treeId: MerkleTreeId.NULLIFIER_TREE, index: number): Promise { - return this.trees.getLeafData(treeId, index, this.includeUncommitted); + async getLeafPreimage( + treeId: MerkleTreeId.NULLIFIER_TREE, + index: bigint, + ): Promise { + const preimage = await this.trees.getLeafPreimage(treeId, index, this.includeUncommitted); + return preimage as IndexedTreeLeafPreimage | undefined; } /** @@ -115,13 +124,12 @@ } /** - * Inserts into the roots trees (CONTRACT_TREE_ROOTS_TREE, NOTE_HASH_TREE_ROOTS_TREE) - * the current roots of the corresponding trees (CONTRACT_TREE, NOTE_HASH_TREE). - * @param globalVariablesHash - The hash of the current global variables to include in the block hash. - * @returns Empty promise. + * Inserts the new block hash into the archive. + * This includes all of the current roots of all of the data trees and the current block's global vars. + * @param globalVariablesHash - The global variables hash to insert into the block hash. */ - public updateBlocksTree(globalVariablesHash: Fr): Promise { - return this.trees.updateBlocksTree(globalVariablesHash, this.includeUncommitted); + public updateArchive(globalVariablesHash: Fr): Promise { + return this.trees.updateArchive(globalVariablesHash, this.includeUncommitted); } /** diff --git a/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts b/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts index 93c94d19163..1fd883b98b5 100644 --- a/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts +++ b/yarn-project/world-state/src/merkle-tree/merkle_tree_snapshot_operations_facade.ts @@ -1,6 +1,7 @@ import { Fr } from '@aztec/circuits.js'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { BatchInsertionResult, IndexedTreeSnapshot, TreeSnapshot } from '@aztec/merkle-tree'; -import { LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { MerkleTreeId, SiblingPath } from '@aztec/types'; import { CurrentTreeRoots, HandleL2BlockResult, MerkleTreeDb, MerkleTreeOperations, TreeInfo } from '../index.js'; @@ -28,23 +29,19 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations async findLeafIndex(treeId: MerkleTreeId, value: Buffer): Promise { const tree = await this.#getTreeSnapshot(treeId); - const numLeaves = tree.getNumLeaves(); - for (let i = 0n; i < numLeaves; i++) { - const currentValue = await tree.getLeafValue(i); - if (currentValue && currentValue.equals(value)) { - return i; - } - } - return undefined; + return tree.findLeafIndex(value); } getLatestGlobalVariablesHash(): Promise { return Promise.reject(new Error('not implemented')); } - async getLeafData(treeId: MerkleTreeId.NULLIFIER_TREE, index: number): Promise { + async getLeafPreimage( + treeId: MerkleTreeId.NULLIFIER_TREE, + index: bigint, + ): Promise { const snapshot = (await
this.#getTreeSnapshot(treeId)) as IndexedTreeSnapshot; - return snapshot.getLatestLeafDataCopy(BigInt(index)); + return snapshot.getLatestLeafPreimageCopy(BigInt(index)); } async getLeafValue(treeId: MerkleTreeId, index: bigint): Promise { @@ -55,16 +52,19 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations getPreviousValueIndex( _treeId: MerkleTreeId.NULLIFIER_TREE, _value: bigint, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }> { + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { return Promise.reject(new Error('not implemented')); } @@ -90,11 +90,11 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations this.#getTreeSnapshot(MerkleTreeId.NOTE_HASH_TREE), this.#getTreeSnapshot(MerkleTreeId.PUBLIC_DATA_TREE), this.#getTreeSnapshot(MerkleTreeId.L1_TO_L2_MESSAGES_TREE), - this.#getTreeSnapshot(MerkleTreeId.BLOCKS_TREE), + this.#getTreeSnapshot(MerkleTreeId.ARCHIVE), ]); return { - blocksTreeRoot: snapshots[MerkleTreeId.BLOCKS_TREE].getRoot(), + archiveRoot: snapshots[MerkleTreeId.ARCHIVE].getRoot(), contractDataTreeRoot: snapshots[MerkleTreeId.CONTRACT_TREE].getRoot(), l1Tol2MessagesTreeRoot: snapshots[MerkleTreeId.L1_TO_L2_MESSAGES_TREE].getRoot(), noteHashTreeRoot: snapshots[MerkleTreeId.NOTE_HASH_TREE].getRoot(), @@ -113,7 +113,7 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations return Promise.reject(new Error('Tree snapshot operations are read-only')); } - updateBlocksTree(): Promise { + updateArchive(): Promise { return Promise.reject(new Error('Tree snapshot operations are read-only')); } @@ -129,7 +129,7 @@ export class MerkleTreeSnapshotOperationsFacade implements MerkleTreeOperations return Promise.reject(new Error('Tree snapshot operations are read-only')); } - updateHistoricBlocksTree(): Promise { + updateHistoricArchive(): Promise { return Promise.reject(new Error('Tree snapshot operations are read-only')); } diff --git a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts index d5e3798a7c6..5e90a48f00f 100644 --- a/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts +++ b/yarn-project/world-state/src/synchronizer/server_world_state_synchronizer.test.ts @@ -72,13 +72,13 @@ const getMockBlock = (blockNumber: number, newContractsCommitments?: Buffer[]) = startContractTreeSnapshot: getMockTreeSnapshot(), startPublicDataTreeRoot: Fr.random(), startL1ToL2MessagesTreeSnapshot: getMockTreeSnapshot(), - startBlocksTreeSnapshot: getMockTreeSnapshot(), + startArchiveSnapshot: getMockTreeSnapshot(), endNoteHashTreeSnapshot: getMockTreeSnapshot(), endNullifierTreeSnapshot: getMockTreeSnapshot(), endContractTreeSnapshot: getMockTreeSnapshot(), endPublicDataTreeRoot: Fr.random(), endL1ToL2MessagesTreeSnapshot: getMockTreeSnapshot(), - endBlocksTreeSnapshot: getMockTreeSnapshot(), + endArchiveSnapshot: getMockTreeSnapshot(), newCommitments: times(MAX_NEW_COMMITMENTS_PER_TX, Fr.random), newNullifiers: times(MAX_NEW_NULLIFIERS_PER_TX, Fr.random), newContracts: newContractsCommitments?.map(x => Fr.fromBuffer(x)) ?? 
[Fr.random()], diff --git a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts index 13c6617513d..4ae2abd0bb0 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_tree_db.ts @@ -1,8 +1,9 @@ -import { MAX_NEW_NULLIFIERS_PER_TX } from '@aztec/circuits.js'; +import { MAX_NEW_NULLIFIERS_PER_TX, NullifierLeafPreimage } from '@aztec/circuits.js'; import { Fr } from '@aztec/foundation/fields'; import { createDebugLogger } from '@aztec/foundation/log'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { BatchInsertionResult, IndexedTreeSnapshot, TreeSnapshot } from '@aztec/merkle-tree'; -import { L2Block, LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { L2Block, MerkleTreeId, SiblingPath } from '@aztec/types'; /** * Type alias for the nullifier tree ID. @@ -73,8 +74,8 @@ export type CurrentTreeRoots = { l1Tol2MessagesTreeRoot: Buffer; /** Nullifier data tree root. */ nullifierTreeRoot: Buffer; - /** Blocks tree root. */ - blocksTreeRoot: Buffer; + /** Archive root. */ + archiveRoot: Buffer; /** Public data tree root */ publicDataTreeRoot: Buffer; }; @@ -136,23 +137,26 @@ export interface MerkleTreeOperations { getPreviousValueIndex( treeId: IndexedTreeId, value: bigint, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }>; + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + >; /** * Returns the data at a specific leaf. * @param treeId - The tree for which leaf data should be returned. * @param index - The index of the leaf required. */ - getLeafData(treeId: IndexedTreeId, index: number): Promise; + getLeafPreimage(treeId: IndexedTreeId, index: bigint): Promise; /** * Update the leaf data at the given index. @@ -160,7 +164,7 @@ export interface MerkleTreeOperations { * @param leaf - The updated leaf value. * @param index - The index of the leaf to be updated. */ - updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: LeafData | Buffer, index: bigint): Promise; + updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: NullifierLeafPreimage | Buffer, index: bigint): Promise; /** * Returns the index containing a leaf value. @@ -177,11 +181,11 @@ export interface MerkleTreeOperations { getLeafValue(treeId: MerkleTreeId, index: bigint): Promise; /** - * Inserts the new block hash into the new block hashes tree. + * Inserts the new block hash into the archive. * This includes all of the current roots of all of the data trees and the current blocks global vars. * @param globalVariablesHash - The global variables hash to insert into the block hash. 
*/ - updateBlocksTree(globalVariablesHash: Fr): Promise; + updateArchive(globalVariablesHash: Fr): Promise; /** * Updates the latest global variables hash diff --git a/yarn-project/world-state/src/world-state-db/merkle_trees.ts b/yarn-project/world-state/src/world-state-db/merkle_trees.ts index 311c071d8b1..4ebcdb101c2 100644 --- a/yarn-project/world-state/src/world-state-db/merkle_trees.ts +++ b/yarn-project/world-state/src/world-state-db/merkle_trees.ts @@ -1,5 +1,5 @@ import { - BLOCKS_TREE_HEIGHT, + ARCHIVE_HEIGHT, CONTRACT_TREE_HEIGHT, Fr, GlobalVariables, @@ -7,12 +7,15 @@ import { NOTE_HASH_TREE_HEIGHT, NULLIFIER_SUBTREE_HEIGHT, NULLIFIER_TREE_HEIGHT, + NullifierLeaf, + NullifierLeafPreimage, PUBLIC_DATA_TREE_HEIGHT, } from '@aztec/circuits.js'; import { computeBlockHash, computeGlobalsHash } from '@aztec/circuits.js/abis'; import { Committable } from '@aztec/foundation/committable'; import { SerialQueue } from '@aztec/foundation/fifo'; import { createDebugLogger } from '@aztec/foundation/log'; +import { IndexedTreeLeafPreimage } from '@aztec/foundation/trees'; import { AppendOnlyTree, BatchInsertionResult, @@ -25,7 +28,7 @@ import { loadTree, newTree, } from '@aztec/merkle-tree'; -import { L2Block, LeafData, MerkleTreeId, SiblingPath } from '@aztec/types'; +import { Hasher, L2Block, MerkleTreeId, SiblingPath } from '@aztec/types'; import { default as levelup } from 'levelup'; @@ -53,6 +56,15 @@ interface FromDbOptions { const LAST_GLOBAL_VARS_HASH = 'lastGlobalVarsHash'; +/** + * The nullifier tree is an indexed tree. + */ +class NullifierTree extends StandardIndexedTree { + constructor(db: levelup.LevelUp, hasher: Hasher, name: string, depth: number, size: bigint = 0n, root?: Buffer) { + super(db, hasher, name, depth, size, NullifierLeafPreimage, NullifierLeaf, root); + } +} + /** * A convenience class for managing multiple merkle trees. */ @@ -82,7 +94,7 @@ export class MerkleTrees implements MerkleTreeDb { CONTRACT_TREE_HEIGHT, ); const nullifierTree = await initializeTree( - StandardIndexedTree, + NullifierTree, this.db, hasher, `${MerkleTreeId[MerkleTreeId.NULLIFIER_TREE]}`, @@ -110,14 +122,14 @@ export class MerkleTrees implements MerkleTreeDb { `${MerkleTreeId[MerkleTreeId.L1_TO_L2_MESSAGES_TREE]}`, L1_TO_L2_MSG_TREE_HEIGHT, ); - const blocksTree: AppendOnlyTree = await initializeTree( + const archive: AppendOnlyTree = await initializeTree( StandardTree, this.db, hasher, - `${MerkleTreeId[MerkleTreeId.BLOCKS_TREE]}`, - BLOCKS_TREE_HEIGHT, + `${MerkleTreeId[MerkleTreeId.ARCHIVE]}`, + ARCHIVE_HEIGHT, ); - this.trees = [contractTree, nullifierTree, noteHashTree, publicDataTree, l1Tol2MessagesTree, blocksTree]; + this.trees = [contractTree, nullifierTree, noteHashTree, publicDataTree, l1Tol2MessagesTree, archive]; this.jobQueue.start(); @@ -125,7 +137,7 @@ export class MerkleTrees implements MerkleTreeDb { if (!fromDb) { const initialGlobalVariablesHash = computeGlobalsHash(GlobalVariables.empty()); await this._updateLatestGlobalVariablesHash(initialGlobalVariablesHash); - await this._updateBlocksTree(initialGlobalVariablesHash, true); + await this._updateArchive(initialGlobalVariablesHash, true); await this._commit(); } else { await this._updateLatestGlobalVariablesHash(fromDbOptions.globalVariablesHash); @@ -177,8 +189,8 @@ export class MerkleTrees implements MerkleTreeDb { * @param globalsHash - The current global variables hash. * @param includeUncommitted - Indicates whether to include uncommitted data. 
*/ - public async updateBlocksTree(globalsHash: Fr, includeUncommitted: boolean) { - await this.synchronize(() => this._updateBlocksTree(globalsHash, includeUncommitted)); + public async updateArchive(globalsHash: Fr, includeUncommitted: boolean) { + await this.synchronize(() => this._updateArchive(globalsHash, includeUncommitted)); } /** @@ -221,7 +233,7 @@ export class MerkleTrees implements MerkleTreeDb { contractDataTreeRoot: roots[2], l1Tol2MessagesTreeRoot: roots[3], publicDataTreeRoot: roots[4], - blocksTreeRoot: roots[5], + archiveRoot: roots[5], }; } @@ -237,7 +249,7 @@ export class MerkleTrees implements MerkleTreeDb { MerkleTreeId.CONTRACT_TREE, MerkleTreeId.L1_TO_L2_MESSAGES_TREE, MerkleTreeId.PUBLIC_DATA_TREE, - MerkleTreeId.BLOCKS_TREE, + MerkleTreeId.ARCHIVE, ].map(tree => this.trees[tree].getRoot(includeUncommitted)); return Promise.resolve(roots); @@ -310,19 +322,20 @@ export class MerkleTrees implements MerkleTreeDb { treeId: IndexedTreeId, value: bigint, includeUncommitted: boolean, - ): Promise<{ - /** - * The index of the found leaf. - */ - index: number; - /** - * A flag indicating if the corresponding leaf's value is equal to `newValue`. - */ - alreadyPresent: boolean; - }> { - return await this.synchronize(() => - Promise.resolve(this._getIndexedTree(treeId).findIndexOfPreviousValue(value, includeUncommitted)), - ); + ): Promise< + | { + /** + * The index of the found leaf. + */ + index: bigint; + /** + * A flag indicating if the corresponding leaf's value is equal to `newValue`. + */ + alreadyPresent: boolean; + } + | undefined + > { + return await this.synchronize(() => this._getIndexedTree(treeId).findIndexOfPreviousKey(value, includeUncommitted)); } /** @@ -330,15 +343,15 @@ export class MerkleTrees implements MerkleTreeDb { * @param treeId - The ID of the tree get the leaf from. * @param index - The index of the leaf to get. * @param includeUncommitted - Indicates whether to include uncommitted data. - * @returns Leaf data. + * @returns Leaf preimage. */ - public async getLeafData( + public async getLeafPreimage( treeId: IndexedTreeId, - index: number, + index: bigint, includeUncommitted: boolean, - ): Promise { + ): Promise { return await this.synchronize(() => - Promise.resolve(this._getIndexedTree(treeId).getLatestLeafDataCopy(index, includeUncommitted)), + this._getIndexedTree(treeId).getLatestLeafPreimageCopy(index, includeUncommitted), ); } @@ -356,13 +369,7 @@ export class MerkleTrees implements MerkleTreeDb { ): Promise { return await this.synchronize(async () => { const tree = this.trees[treeId]; - for (let i = 0n; i < tree.getNumLeaves(includeUncommitted); i++) { - const currentValue = await tree.getLeafValue(i, includeUncommitted); - if (currentValue && currentValue.equals(value)) { - return i; - } - } - return undefined; + return await tree.findLeafIndex(value, includeUncommitted); }); } @@ -373,7 +380,7 @@ export class MerkleTrees implements MerkleTreeDb { * @param index - The index to insert into. * @returns Empty promise. 
*/ - public async updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: LeafData | Buffer, index: bigint): Promise { + public async updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: Buffer, index: bigint): Promise { return await this.synchronize(() => this._updateLeaf(treeId, leaf, index)); } @@ -427,9 +434,9 @@ export class MerkleTrees implements MerkleTreeDb { return Promise.resolve(this.latestGlobalVariablesHash.get(includeUncommitted)); } - private async _updateBlocksTree(globalsHash: Fr, includeUncommitted: boolean) { + private async _updateArchive(globalsHash: Fr, includeUncommitted: boolean) { const blockHash = await this._getCurrentBlockHash(globalsHash, includeUncommitted); - await this._appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await this._appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); } /** @@ -486,11 +493,7 @@ export class MerkleTrees implements MerkleTreeDb { return await tree.appendLeaves(leaves); } - private async _updateLeaf( - treeId: IndexedTreeId | PublicTreeId, - leaf: LeafData | Buffer, - index: bigint, - ): Promise { + private async _updateLeaf(treeId: IndexedTreeId | PublicTreeId, leaf: Buffer, index: bigint): Promise { const tree = this.trees[treeId]; if (!('updateLeaf' in tree)) { throw new Error('Tree does not support `updateLeaf` method'); @@ -542,7 +545,7 @@ export class MerkleTrees implements MerkleTreeDb { [l2Block.endNoteHashTreeSnapshot.root, MerkleTreeId.NOTE_HASH_TREE], [l2Block.endPublicDataTreeRoot, MerkleTreeId.PUBLIC_DATA_TREE], [l2Block.endL1ToL2MessagesTreeSnapshot.root, MerkleTreeId.L1_TO_L2_MESSAGES_TREE], - [l2Block.endBlocksTreeSnapshot.root, MerkleTreeId.BLOCKS_TREE], + [l2Block.endArchiveSnapshot.root, MerkleTreeId.ARCHIVE], ] as const; const compareRoot = (root: Fr, treeId: MerkleTreeId) => { const treeRoot = this.trees[treeId].getRoot(true); @@ -589,7 +592,7 @@ export class MerkleTrees implements MerkleTreeDb { this.log(`Synced global variables with hash ${globalVariablesHash}`); const blockHash = await this._getCurrentBlockHash(globalVariablesHash, true); - await this._appendLeaves(MerkleTreeId.BLOCKS_TREE, [blockHash.toBuffer()]); + await this._appendLeaves(MerkleTreeId.ARCHIVE, [blockHash.toBuffer()]); await this._commit(); } diff --git a/yarn-project/yarn-project-base/Dockerfile b/yarn-project/yarn-project-base/Dockerfile index a4ee8c6b91e..ee9c4f4f6b7 100644 --- a/yarn-project/yarn-project-base/Dockerfile +++ b/yarn-project/yarn-project-base/Dockerfile @@ -47,13 +47,13 @@ FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/bb.js as bb.js FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir as noir FROM 278380418400.dkr.ecr.eu-west-2.amazonaws.com/noir-packages as noir-packages -FROM node:18-alpine +FROM node:18.19.0-alpine RUN apk update && apk add --no-cache bash jq curl # Copy L1 contracts. COPY --from=contracts /usr/src/l1-contracts /usr/src/l1-contracts # Copy in bb.js -COPY --from=bb.js /usr/src/barretenberg/ts/package /usr/src/barretenberg/ts/package +COPY --from=bb.js /usr/src/barretenberg/ts /usr/src/barretenberg/ts # Copy in nargo COPY --from=noir /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo # Copy in noir packages @@ -66,22 +66,28 @@ RUN mkdir /usr/src/noir/compiler && mv /usr/src/noir/packages/source-resolver /u # We install a symlink to yarn-project's node_modules at a location that all portalled packages can find as they # walk up the tree as part of module resolution. 
The supposedly idiomatic way of supporting module resolution # correctly for portalled packages, is to use --preserve-symlinks when running node. -# This does work, but jest doesn't honor it correctly, so this seems like a neat workaround. +# This does kind of work, but jest doesn't honor it correctly, so this seems like a neat workaround. +# Also, --preserve-symlinks causes duplication of portalled instances such as bb.js, and breaks the singleton logic +# by initialising the module more than once. So at present I don't see a viable alternative. RUN ln -s /usr/src/yarn-project/node_modules /usr/src/node_modules WORKDIR /usr/src/yarn-project # The dockerignore file ensures the context only contains package.json and tsconfig.json files. COPY . . +# List all included files and hash for debugging. +RUN echo "Context files: " && find . -type f | sort && \ + echo -n "Context hash: " && find . -type f -print0 | sort -z | xargs -0 sha256sum | sha256sum + # Install packages and rebuild the global cache with hard links. # TODO: Puppeteer is adding ~300MB to this image due to chrome download (part of e2e). # Switch to using puppeteer-core then it won't download chrome. For now just erase. RUN yarn --immutable && rm -rf /root/.cache/puppeteer && /bin/bash -c '\ - rm -rf /root/.yarn/berry/cache/* && \ - cd .yarn/cache && \ - for F in *; do \ - [[ $F =~ (.*-) ]] && ln $F /root/.yarn/berry/cache/${BASH_REMATCH[1]}8.zip; \ - done' + rm -rf /root/.yarn/berry/cache/* && \ + cd .yarn/cache && \ + for F in *; do \ + [[ $F =~ (.*-) ]] && ln $F /root/.yarn/berry/cache/${BASH_REMATCH[1]}8.zip; \ + done' # If everything's worked properly, we should no longer need access to the network. RUN echo "enableNetwork: false" >> .yarnrc.yml diff --git a/yarn-project/yarn-project-base/Dockerfile.dockerignore b/yarn-project/yarn-project-base/Dockerfile.dockerignore index a6ba1856c45..257a2d74457 100644 --- a/yarn-project/yarn-project-base/Dockerfile.dockerignore +++ b/yarn-project/yarn-project-base/Dockerfile.dockerignore @@ -6,7 +6,9 @@ .* README.md bootstrap.sh -Dockerfile +Dockerfile* +*.tsbuildinfo +node_modules # This is a sticking point, due to the project being under it's own dir. # Need to unexclude the dir and then exclude it's files. @@ -17,10 +19,12 @@ Dockerfile !boxes/blank !boxes/blank-react boxes/*/* +!boxes/*/package.json +!boxes/*/tsconfig.json # Unexclude package.json and yarn.lock files, for detecting any dependency changes. -!**/package.json -!**/package.*.json +!*/package.json +!*/package.*.json !yarn.lock # Unexclude parts of yarn related config as this also affects how dependencies are installed. @@ -30,7 +34,7 @@ boxes/*/* !.yarn/patches # Unexclude tsconfig files for running project reference checks. -!**/tsconfig.json +!*/tsconfig.json # Unexclude scripts we use in the Dockerfile. !yarn-project-base/scripts diff --git a/yarn-project/yarn.lock b/yarn-project/yarn.lock index 4c4ad8d3a13..935cf87829a 100644 --- a/yarn-project/yarn.lock +++ b/yarn-project/yarn.lock @@ -277,9 +277,9 @@ __metadata: languageName: unknown linkType: soft -"@aztec/bb.js@portal:../barretenberg/ts/package::locator=%40aztec%2Faztec3-packages%40workspace%3A.": +"@aztec/bb.js@portal:../barretenberg/ts::locator=%40aztec%2Faztec3-packages%40workspace%3A.": version: 0.0.0-use.local - resolution: "@aztec/bb.js@portal:../barretenberg/ts/package::locator=%40aztec%2Faztec3-packages%40workspace%3A." + resolution: "@aztec/bb.js@portal:../barretenberg/ts::locator=%40aztec%2Faztec3-packages%40workspace%3A." 
dependencies: comlink: ^4.4.1 commander: ^10.0.1 @@ -549,6 +549,7 @@ __metadata: version: 0.0.0-use.local resolution: "@aztec/merkle-tree@workspace:merkle-tree" dependencies: + "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" "@aztec/types": "workspace:^" "@jest/globals": ^29.5.0 @@ -572,6 +573,7 @@ __metadata: version: 0.0.0-use.local resolution: "@aztec/noir-compiler@workspace:noir-compiler" dependencies: + "@aztec/circuits.js": "workspace:^" "@aztec/foundation": "workspace:^" "@jest/globals": ^29.5.0 "@ltd/j-toml": ^1.38.0 @@ -7333,9 +7335,6 @@ __metadata: dependencies: "@aztec/aztec-ui": ^0.1.14 "@aztec/aztec.js": "workspace:^" - "@aztec/circuits.js": "workspace:^" - "@aztec/foundation": "workspace:^" - "@aztec/types": "workspace:^" "@types/jest": ^29.5.0 "@types/mocha": ^10.0.3 "@types/node": ^20.5.9 @@ -7384,8 +7383,6 @@ __metadata: dependencies: "@aztec/aztec-ui": ^0.1.14 "@aztec/aztec.js": "workspace:^" - "@aztec/circuits.js": "workspace:^" - "@aztec/foundation": "workspace:^" "@types/jest": ^29.5.0 "@types/mocha": ^10.0.3 "@typescript-eslint/eslint-plugin": ^6.0.0 @@ -19100,9 +19097,6 @@ __metadata: dependencies: "@aztec/aztec-ui": ^0.1.14 "@aztec/aztec.js": "workspace:^" - "@aztec/circuits.js": "workspace:^" - "@aztec/foundation": "workspace:^" - "@aztec/types": "workspace:^" "@jest/globals": ^29.6.4 "@types/jest": ^29.5.0 "@types/mocha": ^10.0.3 diff --git a/yellow-paper/Dockerfile b/yellow-paper/Dockerfile index 1d9939128e9..ab8cb91196d 100644 --- a/yellow-paper/Dockerfile +++ b/yellow-paper/Dockerfile @@ -1,4 +1,4 @@ -FROM node:18-alpine +FROM node:18.19.0-alpine WORKDIR /usr/src COPY . . RUN yarn && yarn build --no-minify \ No newline at end of file diff --git a/yellow-paper/docs/calls/public_private_messaging.md b/yellow-paper/docs/calls/public_private_messaging.md index c418429f2e4..241c796df9f 100644 --- a/yellow-paper/docs/calls/public_private_messaging.md +++ b/yellow-paper/docs/calls/public_private_messaging.md @@ -10,33 +10,55 @@ sidebar_position: 5 This is a draft. These requirements need to be considered by the wider team, and might change significantly before a mainnet release. ::: -Private functions work by providing evidence of correct execution generated locally through kernel proofs. Public functions, on the other hand, are able to utilize the latest state to manage updates and perform alterations. As such, public state and private state are in different trees. In a private function you cannot reference or modify public state and vice versa. - +Public state and private state exist in different trees. In a private function you cannot reference or modify public state. Yet, it should be possible for: 1. private functions to call private or public functions 2. public functions to call private or public functions -For private execution, the user executed methods locally and presents evidence of correct execution as part of their transaction in the form of a kernel proof (generated locally on user device ahead of time). This way, the builder doesn't need to have knowledge of everything happening in the transaction, only the results. However, public functions are executed at the "tip" of the chain (i.e. make use of the latest updates), they can only be done by a builder who is aware of all the changes. Therefore a public function can't be executed locally by the user in the same way a private function is, as it would lead to race conditions, if the user doesn't keep track of the latest updates of the chain. 
If we were to build this public proof on the latest state, we would encounter problems. How can two different users build proofs at the same time, given that they will be executed one after the other by the sequencer? The simple answer is that they cannot, as race conditions would arise where one of the proofs would be invalidated by the other due to a change in the state root (which would nullify Merkle paths).
+Private functions are executed locally by the user and work by providing evidence of correct execution generated locally through kernel proofs. This way, the sequencer doesn't need to have knowledge of everything happening in the transaction, only the results. Public functions, on the other hand, are able to utilize the latest state to manage updates and perform alterations, as they are executed by the sequencer.
 
-As a result, private functions are always executed first, as they are executed on a state $S_i$, where $i \le n$, with $S_n$ representing the current state where the public functions always operate on the current state $S_n$.
+Therefore, private functions are always executed first: they are executed on a state $S_i$, where $i \le n$, while public functions always operate on the current state $S_n$.
 
 This enables private functions to enqueue calls to public functions. But the reverse is not true. Since private functions execute first, they cannot "wait" on the results of any of their calls to public functions. Stated differently, any calls made across domains are unilateral in nature.
 
 The figure below shows the order of function calls on the left-hand side, while the right-hand side shows how the functions will be executed. Notably, the second private function call is independent of the output of the public function and merely occurs after its execution.
 
-![Public - Private Ordering](./images/calls/pvt_pub_ordering.png)
+The Tx call order would be:
+```mermaid
+graph TD
+    A[Private Function 1] -->|Calls| B(Public Function 1)
+    A -----> |Followed by| C[Private Function 2]
+```
 
-## Private to Public Messaging
-If a private function in an Aztec smart contract wants to call a public function, it gets pushed into a separate public call stack that is enqueued. The private kernel circuit which must prove the execution of the private function(s), then hashes each of the item in the call stack and returns that. The private kernel proof, the public inputs of the private kernel (which contain the hash of the each of the public call stack item) and other transaction data (like enqueued public function calls, new commitments, nullifiers etc) get passed along to the sequencer. Sequencer then picks up the public call stack item and executes each of the functions. The Public VM which executes the methods then verifies that the hash provided by the private kernel matches the current call stack item.
+But the Tx execution order would be:
+
+```mermaid
+graph TD
+    A[Private Function 1] -->|Calls| B(Private Function 2)
+    A -----> |Followed by| C[Public Function 1]
+```
 
-This way, you can destroy your private state and create them in public within the same transaction or indirectly assert constraints on the execution of the private functions with latest data.
 
 ## Private to Public Messaging
+When a private function calls a public function:
+1. Public function args get hashed together
+1. A public call stack item is created with the public function selector, its contract address and the args hash
+1. The hash of the item gets enqueued into a separate public call stack and passed as an input to the private kernel
+1. The private kernel pushes these hashes into the public inputs, which the sequencer can see.
+1. The PXE creates a transaction object as outlined [here](../transactions/tx-object.md), where it passes the hashes and the actual call stack item
+1. The PXE sends the transaction to the sequencer.
+1. The sequencer then picks up the public call stack item and executes each of the functions.
+1. The Public VM, which executes the methods, then verifies that the hash provided by the private kernel matches the current call stack item.
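+
+A rough TypeScript sketch of steps 1-3 is shown below. This is an illustrative model only: the hash function, the `PublicCallStackItem` fields and the helper names are assumptions, and the exact encoding is defined by the kernel circuits.
+
+```typescript
+import { createHash } from 'crypto';
+
+// Stand-in for the circuit-friendly hash used by the kernel (assumption).
+const hash = (data: Buffer): Buffer => createHash('sha256').update(data).digest();
+
+interface PublicCallStackItem {
+  contractAddress: Buffer; // the contract exposing the public function
+  functionSelector: Buffer; // which public function to run
+  argsHash: Buffer; // step 1: hash of the public function args
+}
+
+// Step 1: hash the args.
+const hashArgs = (args: Buffer[]): Buffer => hash(Buffer.concat(args));
+
+// Steps 2-3: build the item and hash it; this hash is what gets enqueued on the
+// public call stack and exposed via the kernel's public inputs.
+function hashCallStackItem(item: PublicCallStackItem): Buffer {
+  return hash(Buffer.concat([item.contractAddress, item.functionSelector, item.argsHash]));
+}
+```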
 
 ### Handling Privacy Leakage and `msg.sender`
 
 In the above design, the sequencer only sees the public part of the call stack along with any new commitments, nullifiers etc. that were created in the private transaction, i.e. it should learn nothing more about the private transaction (such as its origin, execution logic etc.).
 
-But what if the enqueued public function makes use of `msg_sender` which is meant to use
+:::warning
+TODO: Haven't finalized what msg.sender will be
+:::
+
+Within the context of these enqueued public functions, any usage of `msg_sender` should return **TODO**. If the `msg_sender` is the actual user, then it leaks privacy. If `msg_sender` is the contract address, this leaks which contract is calling the public method and therefore leaks which contract the user was interacting with in private land.
 
-Specifically, when the call stack is passed to the kernel circuit, the kernel should assert the `msg_sender` is 0 and hash appropriately. `msg_sender` could be the contract address too instead of `0`, but it leaks which contract is calling the public method and therefore leaks which contract the user was interacting with in private land.
+Therefore, when the call stack is passed to the kernel circuit, the kernel should assert the `msg_sender` is 0 and hash appropriately.
 
 ### Reverts
 
diff --git a/yellow-paper/docs/contracts/images/com-abs-6.png b/yellow-paper/docs/contracts/images/com-abs-6.png
new file mode 100644
index 00000000000..20ec6779ef0
Binary files /dev/null and b/yellow-paper/docs/contracts/images/com-abs-6.png differ
diff --git a/yellow-paper/docs/contracts/index.md b/yellow-paper/docs/contracts/index.md
new file mode 100644
index 00000000000..ef08e33450a
--- /dev/null
+++ b/yellow-paper/docs/contracts/index.md
@@ -0,0 +1,342 @@
+---
+title: Cross-chain communication
+---
+
+This section describes what our L1 contracts do, what they are responsible for, and how they interact with the circuits.
+
+Note that the only reason we even have any contracts is to facilitate cross-chain communication. The contracts are not required for the rollup to function, but they are required to bridge assets and to reduce the cost of light nodes.
+
+:::info Purpose of contracts
+The purpose of the L1 contracts is simple:
+- Facilitate cross-chain communication such that L1 liquidity can be used on L2
+- Act as a validating light node for L2 that every L1 node implicitly runs
+:::
+
+## Message Bridges
+
+To let users communicate between L1 and the L2, we are using message bridges, namely an L1 inbox that is paired to an L2 outbox, and an L2 inbox that is paired to an L1 outbox.
+
+![Message bridges](images/com-abs-6.png)
+
+:::info Naming is based on the PoV of the state transitioner.
+:::
+
+While we logically have 4 boxes, we practically only require 3 of those. The L2 inbox is not real, but only logical. This is due to the fact that its messages are always inserted and then consumed in the same block! Insertion requires an L2 transaction, and the message is then consumed and moved to the L1 outbox by the state transitioner in the same block.
+
+### Portals
+
+When deploying a contract on L2, it is possible to specify its "portal" address. This is an immutable variable that can be used to constrain who the L2 contract expects messages from, and who it sends them to.
+
+In the current paradigm, any messages that are sent from the L2 contract to L1 MUST be sent to the portal address. This was to get around the access control issue of private execution and is enforced in the kernel. It practically gives us a 1:M relationship between L1 and L2, where one L1 contract can be specified as the portal for many L2 contracts, and communicate with all of them, but each L2 contract can only communicate with a single L1 contract.
+
+:::warning Comment for discussion
+Plainly speaking, we don't need to restrict the recipient of the message to a single address. We could let the contract itself figure it out. We restricted it for the reasons above, but we could lift this requirement. As long as the portal address exists, it CAN be used to constrain it like this.
+
+Further comment on this later.
+:::
+
+### Messages
+
+Messages that are communicated between L1 and L2 need to contain a minimum of information to ensure that they can be correctly consumed by users. Specifically, the messages should be as described below:
+
+```solidity
+struct L1Actor {
+    address actor;
+    uint256 chainId;
+}
+
+struct L2Actor {
+    bytes32 actor;
+    uint256 version;
+}
+
+struct L1ToL2Msg {
+    L1Actor sender;
+    L2Actor recipient;
+    bytes32 content;
+    bytes32 secretHash;
+    uint32 deadline;
+    uint64 fee;
+}
+
+struct L2ToL1Msg {
+    L2Actor sender;
+    L1Actor recipient;
+    bytes32 content;
+}
+```
+
+Beware that while we speak of messages, we are practically passing around only their **hashes** to reduce cost. The `version` value of the `L2Actor` is the version of the rollup, intended to allow specifying what version of the rollup the message is intended for or sent from.
+
+:::info Why a single hash?
+Persistent storage is expensive, so to reduce overhead we only commit to the messages and then "open" these for consumption later. We need a hash function that is relatively cheap on both L1 and L2, so we chose a modded SHA256 that fits the output value into a single field element.
+:::
+
+Some additional discussion/comments on the message structure can be found in [The Republic](https://forum.aztec.network/t/the-republic-a-flexible-optional-governance-proposal-with-self-governed-portals/609/2#supporting-pending-messages-5).
+
+Since any data that is moving from one chain to the other will at some point live on L1, it will be PUBLIC. While this is fine for L1 consumption (which is public in itself), we want to ensure that the L2 consumption can be private.
+To support this, we use a nullifier scheme similar to what we are doing for all the other notes (**REFERENCE**). As part of the nullifier computation we then use the `secret` which hashes to the `secretHash`; this ensures that only actors with knowledge of `secret` will be able to see when it is spent on L2.
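+
+A minimal TypeScript sketch of the "modded SHA256" idea: hash the message fields with SHA256 and reduce the digest into a single field element. The BN254 field modulus, the 32-byte big-endian encoding and the field ordering are assumptions for illustration; the contracts and circuits define the real encoding. Later sketches in this section reuse these helpers.
+
+```typescript
+import { createHash } from 'crypto';
+
+// Assumed field modulus (BN254 scalar field).
+const FIELD_MODULUS = 21888242871839275222246405745257275088548364400416034343698204186575808495617n;
+
+// SHA256 over 32-byte big-endian encodings, reduced into a single field element.
+function fieldHash(inputs: bigint[]): bigint {
+  const data = Buffer.concat(inputs.map(x => Buffer.from(x.toString(16).padStart(64, '0'), 'hex')));
+  return BigInt('0x' + createHash('sha256').update(data).digest('hex')) % FIELD_MODULUS;
+}
+
+interface L1Actor { actor: bigint; chainId: bigint; }
+interface L2Actor { actor: bigint; version: bigint; }
+interface L1ToL2Msg { sender: L1Actor; recipient: L2Actor; content: bigint; secretHash: bigint; deadline: bigint; fee: bigint; }
+
+// Only this hash, not the full message, is stored and moved between the boxes.
+function messageHash(m: L1ToL2Msg): bigint {
+  return fieldHash([m.sender.actor, m.sender.chainId, m.recipient.actor, m.recipient.version, m.content, m.secretHash, m.deadline, m.fee]);
+}
+```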
+
+Any message that is consumed on one side MUST be moved to the other side. This is to ensure that the messages exist AND are only consumed once. The L1 contracts can handle one side, but the circuits must handle the other.
+
+:::info Is `secretHash` required?
+We are using the `secretHash` to ensure that the user can spend the message privately with a generic nullifier computation. However, as the nullifier computation is almost entirely controlled by the app circuit (except the siloing, **REFERENCE**), applications could be made to simply use a different nullifier computation and have it become part of the content. However, this would increase the developer burden and be quite easy to mess up. For those reasons we have decided to use the `secretHash` as part of the message.
+:::
+
+### Inbox
+
+When we say inbox, we are generally referring to the L1 contract that handles the L1 to L2 messages.
+
+The inbox is logically a [multi-set](https://en.wikipedia.org/wiki/Multiset) that builds messages based on the caller and user-provided content (multi-set meaning that repetitions are allowed). While anyone can insert messages into the inbox, only the recipient state transitioner can consume messages from it (as specified by the version). When the state transitioner is consuming a message, it MUST insert it into the "L2 outbox" (message tree).
+
+When a message is inserted into the inbox, the inbox **MUST** fill in the following fields:
+- `L1Actor.actor`: The sender of the message (the caller), `msg.sender`
+- `L1Actor.chainId`: The chainId of the L1 chain sending the message, `block.chainid`
+
+We MUST populate these values in the inbox, since we cannot rely on the user providing anything meaningful. From the `L1ToL2Msg` we compute a hash of the message. This hash is what is moved by the state transitioner to the L2 outbox.
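+
+In TypeScript, reusing the types and `messageHash` helper from the sketch above, the inbox can be modelled as a multi-set of message hashes where the sender fields are populated by the inbox itself. The function name and state layout are assumptions, not the contract's interface:
+
+```typescript
+// Multi-set of pending L1 -> L2 message hashes: hash => number of copies.
+const inbox = new Map<bigint, number>();
+
+function insertIntoInbox(
+  msgSender: bigint, // enforced by the inbox, not user-provided
+  blockChainid: bigint, // ditto
+  recipient: L2Actor,
+  content: bigint,
+  secretHash: bigint,
+  deadline: bigint,
+  fee: bigint,
+): bigint {
+  const msg: L1ToL2Msg = { sender: { actor: msgSender, chainId: blockChainid }, recipient, content, secretHash, deadline, fee };
+  const h = messageHash(msg);
+  inbox.set(h, (inbox.get(h) ?? 0) + 1); // repetitions are allowed
+  return h;
+}
+```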
+
+Since messages from L1 to L2 can be inserted independently of the L2 block, the message transfer (insert into inbox, move to outbox) is not synchronous as it is for L2 to L1. This means that a message can be inserted into the inbox, but not yet moved to the outbox. The message will then be moved to the outbox when the state transitioner consumes it as part of a block. Since the sequencers are responsible for the ordering of the messages, there is no known time for this pickup to happen; it is async.
+
+This is done to ensure that the messages are not used to DOS the state transitioner. If the state transitioner was forced to pick up the messages in a specific order or at a fixed rate, it could be DOS'd by inserting a message just before an L2 block goes through.
+While this could be addressed by having a queue of messages and letting the sequencer specify the order, this requires extra logic and might be difficult to price correctly. To keep this out of protocol, we simply allow the user to attach a fee to the message (see `fee` in `L1ToL2Msg` above). This way, the user can incentivize the sequencer to pick up the message faster.
+
+Since it is possible to land in a case where the sequencer will never pick up the message (e.g., if it is underpriced), the sender must be able to cancel the message. To ensure that this cancellation cannot happen under the feet of the sequencer, we use a `deadline`; only after the deadline can the message be cancelled.
+
+The contract that sent the message must decide how to handle the cancellation. It could, for example, ignore the cancelled message, or it could refund the user. This is up to the contract to decide.
+
+:::info Error handling
+While we have ensured that the message either arrives in the L2 outbox or is cancelled, we have not ensured that the message is consumed by the L2 contract. This is up to the L2 contract to handle. If the L2 contract does not handle the message, it will be stuck in the outbox forever. Similarly, it is up to the L1 contract to handle the cancellation. If the L1 contract does not handle the cancellation, the user might have a message that is pending forever. Error handling is entirely on the contract developer.
+:::
+
+#### L2 Inbox
+While the L2 inbox is not a real contract, it is a logical contract that applies mutations to the data similar to the L1 inbox, ensuring that the sender cannot fake their position. This logic is handled by the kernel and rollup circuits.
+
+Just like the L1 variant, we must populate some fields:
+- `L2Actor.actor`: The sender of the message (the caller) [also in L1 inbox]
+- `L2Actor.version`: The version of the L2 chain sending the message [also in L1 inbox]
+- `L1Actor.actor`: The recipient of the message (the portal)
+- `L1Actor.chainId`: The chainId of the L1 chain receiving the message
+
+In practice, this is done in the kernel circuit of the L2, and the message hash is a public output of the circuit that is inserted into the L1 outbox for later consumption.
+
+:::warning Comment for discussion
+Note that we are letting the inbox populate more values than we did for the L1 inbox. This is more an opinionated decision than a purely technical one.
+
+We could let the contract itself populate the `L1Actor` like we did for L1, but we decided to let the kernel do it instead, since access control can be quite tedious to get right in private execution. By having the `portal` contract that is specified at the time of deployment, we can insert this value and ensure that it is controlled by the contract.
+If we have a better alternative for access control, this could be changed to be more similar to the L1 inbox, which gives better flexibility.
+:::
+
+### Outbox
+The outboxes are the locations from which a user can consume messages. An outbox can only contain elements that have previously been removed from an inbox.
+
+Our L1 outbox is pretty simple. Like the L1 inbox, it is a multi-set. It should allow the state transitioner to insert messages, and the recipient of a message can consume it (removing it from the outbox).
+
+:::info Checking sender
+When consuming a message on L1, the portal contract must check that it was sent from the expected contract, given that it is possible for multiple contracts on L2 to send to it. If the check is not done, another L2 contract could insert messages that the portal would wrongly accept.
+:::
+
+#### L2 Outbox
+The L2 outbox is quite different. It is a Merkle tree that is populated with the messages moved by the state transitioner. As mentioned earlier, the messages are consumed on L2 by emitting a nullifier from the application circuit.
+
+This means that all validation is done by the application circuit. The application should:
+- Ensure that the message exists in the outbox (message tree)
+- Ensure that the message sender is the expected contract
+- Ensure that the message recipient is itself and that the version matches
+- Ensure that the user knows the `secret` that hashes to the `secretHash` of the message
+- Compute a nullifier that includes the `secret` along with the msg hash and the index of the message in the tree
+  - The index is included to ensure that the nullifier is unique for each message
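+
+A sketch of these application-side checks in TypeScript, again reusing the helpers above. The membership check is stubbed out (the real circuit verifies a Merkle path against the message tree root), and the nullifier layout simply follows the bullets:
+
+```typescript
+// Stub: the application circuit proves a Merkle membership path for the message.
+declare function assertMembership(leaf: bigint, leafIndex: bigint, root: bigint): void;
+
+function consumeL1ToL2Message(msg: L1ToL2Msg, secret: bigint, leafIndex: bigint, root: bigint, self: L2Actor): bigint {
+  const h = messageHash(msg);
+  assertMembership(h, leafIndex, root); // message exists in the message tree
+  // (the check of msg.sender against the expected portal contract is omitted here)
+  if (msg.recipient.actor !== self.actor || msg.recipient.version !== self.version) {
+    throw new Error('message is not addressed to this contract/version');
+  }
+  if (fieldHash([secret]) !== msg.secretHash) throw new Error('unknown secret');
+  // Including the leaf index makes the nullifier unique per message.
+  return fieldHash([secret, h, leafIndex]);
+}
+```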
+
+## Registry
+The registry is a contract that holds the current and historical addresses of the core rollup contracts. The addresses of a rollup deployment are contained in a snapshot, and the registry tracks version-snapshot pairs. Depending on the upgrade scheme, it might be used to handle upgrades, or it could be removed entirely. It is generally the one address that a node MUST know about, as it can then tell the node where to find the remainder of the contracts. This is, for example, used when looking up the address that new L2 blocks should be published to.
+
+## State transitioner
+The state transitioner is the heart of the validating light node for the L2. Practically, this means that the contract keeps track of the current state of the L2 and progresses this state when a valid L2 block is received. It also facilitates cross-chain communication (communication between the L1 inbox and outbox contracts).
+
+When new blocks are to be processed, the state transitioner receives the `header` of the block, and commitments to its content (following the same scheme as the rollup circuits) from the Decoder. The header definition can be found in **REFERENCE**, but it contains commitments to the state before and after the block.
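+
+A tiny TypeScript sketch of the registry's role: a single well-known contract mapping versions to snapshots of a deployment's addresses. The field and method names here are assumptions:
+
+```typescript
+interface DeploymentSnapshot {
+  stateTransitioner: string;
+  inbox: string;
+  outbox: string;
+}
+
+class Registry {
+  private snapshots = new Map<number, DeploymentSnapshot>(); // version => snapshot
+  private currentVersion = 0;
+
+  addVersion(version: number, snapshot: DeploymentSnapshot): void {
+    this.snapshots.set(version, snapshot);
+    this.currentVersion = version;
+  }
+
+  // Historical lookup, e.g. to sync an old deployment.
+  getSnapshot(version: number): DeploymentSnapshot | undefined {
+    return this.snapshots.get(version);
+  }
+
+  // The one lookup every node needs, e.g. where new L2 blocks should be published.
+  getCurrentSnapshot(): DeploymentSnapshot {
+    return this.snapshots.get(this.currentVersion)!;
+  }
+}
+```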
+
+### Decoder
+The state transitioner should be connected to a decoder, which addresses the decode validity condition and feeds the outputs back into the state transitioner. The action of preparing outputs for the state transitioner should be independent from the processing of a proof, thereby allowing for multi-transaction setups.
+
+In a solo-DA paradigm there will be just one decoder, which can be integrated into the state transitioner, but for multi-layer DA setups, the decoders SHOULD be separate contracts.
+
+
+## Validity conditions (constraints)
+While there are multiple contracts, they work in unison to ensure that the rollup is valid and that messages are correctly moved between the chains. In practice, this means that the contracts are to ensure that the following constraints are met in order for the validating light node to accept a block.
+
+Note that some conditions are marked as SHOULD, which is not strictly needed for the security of the rollup, but for the security of the individual applications or for UX.
+
+- **Decode**:
+  - A commitment to the block content must be computed following the same scheme as the rollup circuits using only PUBLISHED DATA. See **REFERENCE** for more details on the commitment computation.
+  - A commitment to the L1 to L2 messages must be computed following the same scheme as the rollup circuits using only PUBLISHED DATA. See **REFERENCE** for more details on the commitment computation.
+- **Header Validation** (see the sketch after this list):
+  - The starting state of the block (derived from the header) MUST match the state stored in the contract
+  - The global variables defined by the header MUST be valid:
+    - The block number MUST be the next block number
+    - The timestamp MUST:
+      - be newer than the previous block inclusion
+      - not be in the future (if L1 time is less than L2 time, we are in the future)
+    - The version MUST be the same as the current version
+    - The chainId MUST be the same as the current chainId
+  - The ending state of the block (derived from the header) MUST *replace* the state stored in the contract
+    - Requires ALL `MUST` constraints to be met
+- **Proof validation**: The proof MUST be valid with the public inputs hash
+  - A single public input hash MUST be computed from the block header, the commitment to the block content and the commitment to L1 to L2 messages.
+- **State update**: The state root MUST be set to the ending state value
+- **Inserting messages**: for messages that are inserted into the inboxes:
+  - The `sender.actor` MUST be the caller
+  - The `(sender|recipient).chainId` MUST be the chainId of the L1 where the state transitioner is deployed
+  - The `(sender|recipient).version` MUST be the version of the state transitioner (the version of the L2 specified in the L1 contract)
+  - The `content` MUST fit within a field element
+  - For L1 to L2 messages:
+    - The `deadline` MUST be in the future, `> block.timestamp`
+    - The `secretHash` MUST fit in a field element
+    - The caller MAY append a `fee` to incentivize the sequencer to pick up the message
+- **Message Cancellation**: To remove messages from the L1 inbox:
+  - The message MUST exist in the inbox
+  - The caller MUST be `sender.actor`
+  - The current time (`block.timestamp`) MUST be larger than the `deadline`
+  - The `fee` SHOULD be refunded to the caller
+- **Moving messages**:
+  - Moves MUST be atomic:
+    - Any message that is inserted into an outbox MUST be consumed from the matching inbox
+    - Any message that is consumed from an inbox MUST be inserted into the matching outbox
+  - Messages MUST be moved by the state transitioner whose `version` matches the `version` of the message
+- **Consuming messages**: for messages that are consumed from the outboxes:
+  - L2 to L1 messages (on L1):
+    - The consumer (caller) MUST match the `recipient.actor`
+    - The consumer chainId MUST match the `recipient.chainId`
+    - The consumer SHOULD check the `sender`
+  - L1 to L2 messages (on L2):
+    - The consumer contract SHOULD check the `sender` details against the `portal` contract
+    - The consumer contract SHOULD check that the `secret` is known to the caller
+    - The consumer contract SHOULD check the `recipient` details against its own details
+    - The consumer contract SHOULD emit a nullifier to prevent double-spending
+    - The consumer contract SHOULD check that the message exists in the state
+
+:::info
+- We compute a single hash since each public input increases the costs of proof verification.
+- Time constraints might change depending on the exact sequencer selection mechanism.
+:::
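+
+The **Header Validation** rules above translate almost mechanically into code. A TypeScript sketch with an assumed minimal header shape (the real header is defined in the referenced section):
+
+```typescript
+interface Header { blockNumber: number; timestamp: bigint; version: number; chainId: bigint; startStateHash: bigint; endStateHash: bigint; }
+interface ChainState { stateHash: bigint; blockNumber: number; version: number; chainId: bigint; lastTimestamp: bigint; l1Now: bigint; }
+
+function validateHeader(h: Header, current: ChainState): void {
+  if (h.startStateHash !== current.stateHash) throw new Error('start state mismatch');
+  if (h.blockNumber !== current.blockNumber + 1) throw new Error('wrong block number');
+  if (h.timestamp <= current.lastTimestamp) throw new Error('timestamp not newer than previous block');
+  if (h.timestamp > current.l1Now) throw new Error('timestamp in the future');
+  if (h.version !== current.version) throw new Error('wrong version');
+  if (h.chainId !== current.chainId) throw new Error('wrong chainId');
+  // All MUSTs passed: the ending state may now replace the stored state.
+}
+```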
+
+## Logical Execution
+Below, we outline the **LOGICAL** execution of an L2 block and how the contracts interact with the circuits. We will be executing cross-chain communication before and after the block itself. Note that in reality, the L2 inbox does not exist, and its functionality is handled by the kernel and the rollup circuits.
+
+```mermaid
+sequenceDiagram
+    autonumber
+    title Logical Interactions of Crosschain Messages
+
+    participant P2 as Portal (L2)
+
+    participant I2 as Inbox (L2)
+    participant O2 as Outbox (L2)
+    participant R2 as Rollup (L2)
+    participant R as Validating Light Node (L1)
+    participant Reg as Registry
+    participant I as Inbox
+    participant O as Outbox
+
+    participant P as Portal
+
+    P->>I: Send msg to L2
+    I->>I: Populate msg values
+    I->>I: Update state (insert)
+
+    loop block in chain
+
+        loop tx in txs
+
+            loop msg in tx.l1ToL2Consume
+                P2->>O2: Consume msg
+                O2->>O2: Validate msg
+                O2->>O2: Update state (nullify)
+            end
+
+            loop msg in tx.l2ToL1Msgs
+                P2->>I2: Add msg
+                I2->>I2: Populate msg values
+                I2->>I2: Update state (insert)
+            end
+        end
+
+        loop msg in L2 inbox
+            R2->>O2: Consume msg
+            O2->>O2: Update state (delete)
+        end
+
+        loop msg in l1ToL2Msgs
+            R2->>O2: Insert msg
+            O2->>O2: Update state (insert)
+        end
+
+        R2->>R: Block (Proof + Data)
+
+        R->>R: Verify proof
+        R->>R: Update State
+
+        R->>Reg: Where is the Inbox?
+        Reg->>R: Here is the address
+
+        R->>I: Consume l1ToL2Msgs from L1
+        I->>I: Update state (delete)
+
+        R->>Reg: Where is the Outbox?
+        Reg->>R: Here is the address
+
+        R->>O: Insert Messages from L2
+        O->>O: Update state (insert)
+
+    end
+
+    P->>O: Consume a msg
+    O->>O: Validate msg
+    O->>O: Update state (delete)
+```
+We will walk briefly through the steps of the diagram above.
+
+1. A portal contract on L1 wants to send a message to L2
+1. The L1 inbox populates the message with `sender` information
+1. The L1 inbox contract inserts the message into its storage
+1. On L2, as part of an L2 block, a transaction tries to consume a message from the L2 outbox.
+1. The L2 outbox ensures that the message is included, and that the caller is the recipient and knows the secret to spend it. (This is practically done by the application circuit)
+1. The nullifier of the message is emitted to privately spend the message (This is practically done by the application circuit)
+1. The L2 contract wishes to send a message to L1
+1. The L2 inbox populates the message with `sender` and `recipient` information
+1. The L2 inbox inserts the message into its storage
+1. The rollup circuit starts consuming the messages from the inbox
+1. The L2 inbox deletes the messages from its storage
+1. The L2 block includes messages from the L1 inbox that are to be inserted into the L2 outbox.
+1. The outbox state is updated to include the messages
+1. The L2 block is submitted to L1
+1. The state transitioner receives the block, verifies the proof and validates the constraints on the block.
+1. The state transitioner updates its state to the ending state of the block
+1. The state transitioner asks the registry for the L1 inbox address
+1. The state transitioner retrieves the L1 inbox address
+1. The state transitioner consumes from the L1 inbox the messages that were specified in the block. Note that they have logically been inserted into the L2 outbox, ensuring atomicity.
+1. The L1 inbox updates its local state by deleting the messages that were consumed
+1. The state transitioner asks the registry for the L1 outbox address
+1. The state transitioner retrieves the L1 outbox address
+1. The state transitioner inserts into the L1 outbox the messages that were specified in the block. Note that they have logically been consumed from the L2 outbox, ensuring atomicity.
+1. The L1 outbox updates its local state by inserting the messages
+1. The portal later consumes the message from the L1 outbox
+1. The L1 outbox validates that the message exists and that the caller is the recipient
+1. The L1 outbox updates its local state by deleting the message
+
+:::info L2 inbox is not real
+As should be clear from the above, the L2 inbox doesn't need to exist for itself; it keeps no state between blocks, as every message created in a block will also be consumed in the same block.
+:::
+
+
+## Future work
+- Sequencer selection contract(s)
+  - Relies on the sequencer selection scheme being more explicitly defined
+  - Relies on being able to validate the sequencer selection scheme
+- Improve public inputs hash computation
+  - Currently it requires calldata and blocks to be passed along with the proof, but it should be adapted to better allow other DA layers.
+  - Modularize the computation such that the state transitioner need not know the exact computation but merely uses a separate contract as an oracle.
+- Governance/upgrade contract(s)
+  - Relies on the governance/upgrade scheme being more explicitly defined
+- Explore getting rid of the specific 1:M relationship between L1 and L2
+- Forced transaction inclusion
+  - While we don't have an exact scheme, an outline was made in [hackmd](https://hackmd.io/@aztec-network/S1lRcMkvn?type=view) and the [forum](https://forum.aztec.network/t/forcing-transactions/606)
\ No newline at end of file
diff --git a/yellow-paper/docs/decentralisation/decentralisation.md b/yellow-paper/docs/decentralisation/decentralisation.md
index e69de29bb2d..1853cf324ac 100644
--- a/yellow-paper/docs/decentralisation/decentralisation.md
+++ b/yellow-paper/docs/decentralisation/decentralisation.md
@@ -0,0 +1,5 @@
+---
+sidebar_position: 1
+---
+
+# Decentralisation
\ No newline at end of file
diff --git a/yellow-paper/docs/decentralisation/p2p-network.md b/yellow-paper/docs/decentralisation/p2p-network.md
index 90d415b48ae..8ffa1179465 100644
--- a/yellow-paper/docs/decentralisation/p2p-network.md
+++ b/yellow-paper/docs/decentralisation/p2p-network.md
@@ -1,41 +1,68 @@
+---
+sidebar_position: 1
+---
+
+# P2P Network
+
 ## Requirements for a P2P Network
 
 :::info Disclaimer
 This is a draft. These requirements need to be considered by the wider team, and might change significantly before a mainnet release.
 :::
 
-When rollups are successfully published, the state transitions are published along with it and are publically retrievable. This category of state does not depend on the Aztec network for its persistence or distribution. Transient data such as pending user transactions for inclusion in future rollups however does rely on the network for these functions. Network participants will consist of:
+When a rollup is successfully published, the state transitions it produces are published along with it, making them publicly available. This broadcasted state does not depend on the Aztec network for its persistence or distribution. Transient data, however, such as pending user transactions for inclusion in future rollups, does rely on the network for this. It is important that the network provides a performant, permissionless and censorship-resistant mechanism for the effective propagation of these transactions to all network participants. Without this, transactions may be disadvantaged and the throughput of the network will deteriorate.
+
+Other data that may be transmitted over the network are the final rollup proofs to be submitted to the rollup contract; however, the size and rate of these payloads should not make any meaningful impact on the bandwidth requirements.
+
+### Network Participants
+
+For the purpose of this discussion, we define the 'Aztec Network' as the set of components required to ensure the continual distribution of user transactions and production of rollups. The participants in such a network are:
 
 * Sequencers - responsible for selecting transactions from the global pool and including them in rollups
 * Provers - responsible for generating zk-proofs for the transaction and rollup circuits
+* Transaction Pool Nodes - responsible for maintaining a local representation of the pending transaction pool
+* Bootnodes - responsible for providing an entrypoint into the network for new participants
+
+Sequencers and Provers will likely run their own transaction pools, but it is important that the ability to do so is not limited to these participants. Anyone can operate a transaction pool, providing increased privacy and censorship resistance.
+
+Client PXEs will not interact directly with the network but instead via instances of the Aztec Node and its JSON RPC interface. The Aztec Node in turn will publish user transactions to the network.
+
+### Network Topology and Transaction Submission
+
+The network will likely be based on the LibP2P specification.
+
+#### Discovery
 
-Pending transactions will be the primary category of data being transmitted through the network. It is important that the network provides a performant, permissionless and censorship resistant mechanism for the effective propagation of these transactions to all sequencers. Without this, transactions may be disadvantaged and the throughput of the network will deteriorate.
+When a node such as a sequencer joins the network for the first time, it will need to contact a bootnode. This will be one of a hardcoded set of nodes whose sole purpose is to provide an initial set of peers to connect with. Once a node has made contact with and stored address data for a number of peers, it should no longer need to contact the bootnodes, provided its peers remain available.
 
-Other data that may be transmitted over the network are the final rollup proofs to be submitted to the rollup contract, the size and rate of these payloads should not make any meaningful impact on the bandwidth requirements.
+#### Gossip
 
-### Network Capacity
+Transactions will need to be propagated throughout the network, to every participant. Due to the size of the transaction payloads, it will be necessary to ensure that transaction propagation is performed as efficiently as possible, taking steps to reduce the amount of redundant data transfer.
 
-Transactions are composed of a number of data elements and can vary in size predominantly based on their deployment of any public bytecode and the private kernel proof. A typical transaction that emits a private note and an unencrypted log, makes a public call and contains a valid proof would consume approximately 40Kb of data. A transaction that additionally deploys a contract would need to transmit the public bytecode on top of this.
+#### Client Interactions
+
+Aztec Node instances will offer a JSON RPC interface for consumption by a user's PXE. Part of this API will facilitate transaction submission directly to the node, which will then forward it to the network via the transaction pool.
+
+![P2P Network](../decentralisation/images/network.png)
+
+### Network Bandwidth
+
+Transactions are composed of several data elements and can vary in size. The transaction size is determined largely by the private kernel proof and whether the transaction deploys any public bytecode.
A typical transaction that emits a private note and an unencrypted log, makes a public call and contains a valid proof would consume approximately 40Kb of data. A transaction that additionally deploys a contract would need to transmit the public bytecode on top of this.
 
 | Element | Size |
 | ------- | ---------------- |
 | Public Inputs, Public Calls and Emitted Logs | ~8Kb |
 | Private Kernel Proof | ~32Kb |
 
-At throughputs of 10 and 100 transactions per second, we can arrive at average network bandwidth requirements of 400Kb and 4000Kb per second respectively.
+If we take two values of transaction throughput, 10 and 100 transactions per second, we arrive at average network bandwidth requirements of 400Kb and 4000Kb per second respectively.
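+
+As a quick sanity check, the arithmetic in TypeScript (sizes are the approximations from the table above):
+
+```typescript
+const txSizeKb = 8 + 32; // public inputs/calls/logs + private kernel proof ~= 40Kb per transaction
+for (const tps of [10, 100]) {
+  console.log(`${tps} tx/s => ${txSizeKb * tps}Kb/s`); // 400Kb/s and 4000Kb/s
+}
+```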
+Simulation of the transaction will provide feedback as to its gas consumption; this can be repeated to converge on the optimum values of fee and gas limits for the transaction. The private portion of the transaction will be proven via the private kernel circuit, resulting in a number of fee-related public inputs:
 
 - **feeCommitments** - New commitments generated as part of fee preparation
 - **feeNullifiers** - New nullifiers generated as part of fee preparation
 - **feePreparation** - A single public function call to be made as part of fee preparation
 - **feeDistribution** - A single public function call to be made as part of fee distribution
+- **feeEncryptedLogsHash** - The hash of encrypted logs generated by the fee payment
+- **feeUnencryptedLogsHash** - The hash of unencrypted logs generated by the fee payment
 - **feePerL1Gas** - The fee provided per unit of L1 gas
 - **feePerL2Gas** - The fee provided per unit of L2 gas
 - **l1BaseGasLimit** - The upper bound of L1 amortized gas the transaction is willing to pay for
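The iterative estimation loop described above might look like the following sketch. The `simulate` function is a hypothetical stand-in for the node's simulation API, and the convergence policy is illustrative only:

```typescript
// Hypothetical sketch of iterative fee estimation: start with very high
// limits, simulate, then tighten the limits toward observed consumption.
interface Gas {
  l1Gas: number;
  l2Gas: number;
}

// Mock simulation: pretend the transaction consumes fixed gas, provided the
// supplied limits are sufficient. A real node would execute the transaction.
function simulate(limits: Gas): Gas {
  const used: Gas = { l1Gas: 21_000, l2Gas: 150_000 };
  if (limits.l1Gas < used.l1Gas || limits.l2Gas < used.l2Gas) {
    throw new Error('out of gas');
  }
  return used;
}

function estimateGasLimits(iterations = 3): Gas {
  let limits: Gas = { l1Gas: 1_000_000_000, l2Gas: 1_000_000_000 };
  for (let i = 0; i < iterations; i++) {
    const used = simulate(limits);
    // Converge on realistic limits, keeping ~10% headroom over observed use.
    limits = {
      l1Gas: Math.ceil((used.l1Gas * 11) / 10),
      l2Gas: Math.ceil((used.l2Gas * 11) / 10),
    };
  }
  return limits;
}

console.log(estimateGasLimits()); // { l1Gas: 23100, l2Gas: 165000 }
```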
diff --git a/yellow-paper/docs/private-message-delivery/private-message-delivery.md b/yellow-paper/docs/private-message-delivery/private-message-delivery.md
index c379a60bc30..9c905c7b7d3 100644
--- a/yellow-paper/docs/private-message-delivery/private-message-delivery.md
+++ b/yellow-paper/docs/private-message-delivery/private-message-delivery.md
@@ -6,13 +6,13 @@ sidebar_position: 1
 
 ## Requirements
 
-Maintaining the core tenet of privacy within the Aztec Network imposes a number of requirements on it. If Alice executes a function that generates state for Bob:
+Maintaining the core tenet of privacy within the Aztec Network imposes a number of requirements related to the transfer of notes from one user to another. If Alice executes a function that generates a note for Bob:
 
-1. Alice will need to encrypt that state such that Bob, and only Bob is able to decrypt it.
-2. Alice will need to broadcast the encrypted state so as to make it available for Bob to retrieve.
-3. Alice will need to broadcast a 'tag' alongside the encrypted state. This tag must be identifiable by Bob's chosen [note discovery protocol](./note-discovery.md) but not identifiable by any third party.
+1. Alice will need to encrypt that note such that Bob, and only Bob, is able to decrypt it.
+2. Alice will need to broadcast the encrypted note so as to make it available for Bob to retrieve.
+3. Alice will need to broadcast a 'tag' alongside the encrypted note. This tag must be identifiable by Bob's chosen [note discovery protocol](./note-discovery.md) but not identifiable by any third party.
 
-Fulfilling these requirements will enable users to privately identify, retrieve, decrypt and consume their application state. Individual pieces of application state transmitted in this way are termed 'notes'.
+Fulfilling these requirements will enable users to privately identify, retrieve, decrypt and consume their application notes.
 
 ## Constraining Message Delivery
 
@@ -24,15 +24,19 @@ The network will constrain:
 
 Constraining [note encryption](./encryption-and-decryption.md) and tagging will be done through protocol defined functions within a user's account contract. The advantages of this approach are:
 
-1. It enables a user to select their preferred [note discovery protocol](./note-discovery.md) and/or encryption scheme.
+1. It enables a user to select their preferred [note discovery protocol](./note-discovery.md) and/or encryption scheme.
 2. It ensures that notes are correctly encrypted with a user's public encryption key.
-3. It ensures that notes are correctly tagged for a user's chosen [note discovery protocol](./note-discovery.md) .
+3. It ensures that notes are correctly tagged for a user's chosen [note discovery protocol](./note-discovery.md).
 4. It provides scope for upgrading these functions or introducing new schemes as the field progresses.
 5. It protects applications from malicious account contracts providing unprovable functions.
 
+> Note: Constraining tag generation is not solely about ensuring that the generated tag is of the correct format. It is also necessary to constrain that tags are generated in the correct sequence. A tag sequence with duplicate or missing tags makes it much more difficult for the recipient to retrieve their notes. This will likely require tags to be nullified once used.
+
 Constraining publication to the correct data availability layer will be performed via a combination of the protocol circuits and the rollup contract on L1.
 
 ## User Handshaking
 
-One function that is useful regardless of a user's preferred note discovery and encryption scheme is for users to be notified when they have been sent a note from another user for the first time. To facilitate this we will deploy a 'handshaking' contract that can be used to create a private note for a recipient containing the sender's information (e.g. public key). The notes generated by this contract will be easy to identify enabling users to retrieve these notes, decrypt them and use the contents in any deterministic tag generation used by their chosen note discovery protocol. Trial decryption of these notes alone should not put too high a burden on end users.
+Even if Alice correctly encrypts the note she creates for Bob and generates the correct tag to go with it, how does Bob know that Alice has sent him a note? Bob's [note discovery protocol](./note-discovery.md) may require him to speculatively 'look' for notes with the tags that Alice (and his other counterparties) have generated. If Alice and Bob know each other, they can communicate out-of-protocol. But if they have no way of interacting, the network needs to provide a mechanism by which Bob can be alerted to the need to start searching for a specific sequence of tags.
+
+To facilitate this we will deploy a 'handshake' contract that can be used to create a private note for a recipient containing the sender's information (e.g. public key). It should only be necessary for a single handshake to take place between two users. The notes generated by this contract will be easy to identify, enabling users to retrieve these notes, decrypt them and use the contents in any deterministic tag generation used by their chosen note discovery protocol.
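To make the tag-sequence idea concrete, a minimal sketch follows. The derivation and the hash are illustrative assumptions, not the protocol's actual scheme:

```typescript
// Illustrative only: deriving a deterministic tag sequence from a shared
// secret exchanged via the handshake contract. SHA-256 is a stand-in for
// whatever hash the chosen note discovery protocol actually specifies.
import { createHash } from 'crypto';

function tagAt(sharedSecret: string, index: number): string {
  return createHash('sha256').update(`${sharedSecret}:${index}`).digest('hex');
}

// Bob scans for tags in order; a duplicate or skipped index on the sender's
// side would break this scan, which is why tag generation must be constrained.
const secret = 'secret-from-handshake-note'; // hypothetical handshake output
for (let i = 0; i < 3; i++) {
  console.log(tagAt(secret, i));
}
```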
diff --git a/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx b/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx
index 12b44617024..ba11a18a0e9 100644
--- a/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx
+++ b/yellow-paper/docs/public-vm/gen/_InstructionSet.mdx
@@ -362,7 +362,7 @@ Click on an instruction name to jump to its section.
 Get the historical blocks tree root as of the specified block number.
 64
 {
-    `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].blocks_tree_root`
+    `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].archive_root`
 }
@@ -1266,7 +1266,7 @@ Get the historical blocks tree root as of the specified block number.
 - **Args**:
     - **blockNumOffset**: memory offset of the block number input
     - **dstOffset**: memory offset specifying where to store operation's result
-- **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].blocks_tree_root`
+- **Expression**: `M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].archive_root`
 - **Tag updates**: `T[dstOffset] = field`
 - **Bit-size**: 64
 
diff --git a/yellow-paper/docs/state/index.md b/yellow-paper/docs/state/index.md
new file mode 100644
index 00000000000..edb6a3ca7fd
--- /dev/null
+++ b/yellow-paper/docs/state/index.md
@@ -0,0 +1,13 @@
+---
+title: State
+---
+
+# State
+
+Global state in the Aztec Network is represented by a set of Merkle trees: the [Note Hash tree](./note_hash_tree.md), [Nullifier tree](./nullifier_tree.md), and [Public Data tree](./public_data_tree.md) reflect the latest state of the chain.
+
+Merkle trees are either [append-only](./tree_impls.md#append-only-merkle-trees), for storing immutable data, or [indexed](./tree_impls.md#indexed-merkle-trees), for storing data that requires proofs of non-membership.
+
+import DocCardList from '@theme/DocCardList';
+
+<DocCardList />
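As a rough illustration of the state layout just described, a hypothetical sketch (names and interfaces are placeholders, not the actual codebase types):

```typescript
// Hypothetical shape of the global state trees described in state/index.md.
interface AppendOnlyTree {
  root: bigint;
  append(leaf: bigint): void; // leaves are immutable once inserted
}

interface IndexedTree extends AppendOnlyTree {
  // Supports efficient non-membership proofs via the sorted linked list.
  lowLeaf(value: bigint): { value: bigint; nextValue: bigint };
}

interface GlobalState {
  noteHashTree: AppendOnlyTree; // siloed note commitments
  nullifierTree: IndexedTree;   // siloed nullifiers
  publicDataTree: IndexedTree;  // siloed key-value public state
}
```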
diff --git a/yellow-paper/docs/state/note_hash_tree.md b/yellow-paper/docs/state/note_hash_tree.md
new file mode 100644
index 00000000000..383f974fca4
--- /dev/null
+++ b/yellow-paper/docs/state/note_hash_tree.md
@@ -0,0 +1,25 @@
+# Note Hash Tree
+
+The Note Hash tree is an [append-only Merkle tree](./tree_impls.md#append-only-merkle-trees) that stores siloed note hashes as its elements. Each element in the tree is a 254-bit altBN-254 scalar field element. This tree is part of the global state, and allows proving existence of private notes via Merkle membership proofs.
+
+Note commitments are immutable once created, since notes cannot be modified. Still, notes can be consumed, which means they can no longer be used. To preserve privacy, a consumed note is not removed from the tree; otherwise it would be possible to link the transaction that created a note with the one that consumed it. Instead, a note is consumed by emitting a deterministic [nullifier](./nullifier_tree.md).
+
+Contracts emit new note commitments via the `new_commitments` field in the `CircuitPublicInputs`, which are subsequently [siloed](./tree_impls.md#siloing-leaves) per contract by the Kernel circuit. Siloing the commitment ensures that a contract cannot emit a commitment for a note that could be used for a different contract.
+
+The Kernel circuit also guarantees uniqueness of commitments by further hashing them with a nonce, derived from the transaction identifier and the index of the commitment within the transaction. Uniqueness means that a note with the same contents can be emitted more than once, and each instance can be independently nullified. Without uniqueness, two notes with the same content would yield the same commitment and nullifier, so nullifying one of them would flag the second one as nullified as well.
+
+The pseudocode for siloing and making a commitment unique is the following, where each `hash` operation is a Pedersen hash with a unique generator index, indicated by the constant in all caps.
+
+```
+fn compute_unique_siloed_commitment(commitment, contract, transaction):
+    let siloed_commitment = hash([contract, commitment], SILOED_COMMITMENT)
+    let index = index_of(commitment, transaction.commitments)
+    let nonce = hash([transaction.tx_hash, index], COMMITMENT_NONCE)
+    return hash([nonce, siloed_commitment], UNIQUE_COMMITMENT)
+```
+
+The unique siloed commitment of a note is included in the [transaction `data`](../transactions/tx-object.md), and then inserted into the Note Hash tree by the sequencer as the transaction is included in a block.
+
+The protocol does not enforce any constraints on the commitment emitted by an application. This means that applications are responsible for including a `randomness` field in the note hash to make the commitment _hiding_ in addition to _binding_. If an application does not include randomness, and the note preimage can be guessed by an attacker, the note is vulnerable to preimage attacks, since the siloing and uniqueness steps do not provide hiding.
+
+Furthermore, since there are no constraints on the commitment emitted by an application, an application can emit any value whatsoever as a `new_commitment`, including values that do not map to a note hash.
diff --git a/yellow-paper/docs/state/nullifier_tree.md b/yellow-paper/docs/state/nullifier_tree.md
new file mode 100644
index 00000000000..02fd697ab0a
--- /dev/null
+++ b/yellow-paper/docs/state/nullifier_tree.md
@@ -0,0 +1,20 @@
+# Nullifier Tree
+
+The Nullifier tree is an [indexed Merkle tree](./tree_impls.md#indexed-merkle-trees) that stores nullifier values. Each value stored in the tree is a 254-bit altBN-254 scalar field element. This tree is part of the global state, and allows proving non-existence of a nullifier when a note is consumed.
+
+Nullifiers are asserted to be unique during insertion, by checking that the inserted value is not equal to the value and next-value stored in the prior node in the indexed tree. Any attempt to insert a duplicated value is rejected.
+
+Contracts emit new nullifiers via the `new_nullifiers` field in the `CircuitPublicInputs`. As with elements in the [Note Hash tree](./note_hash_tree.md), nullifiers are [siloed](./tree_impls.md#siloing-leaves) per contract by the Kernel circuit before being inserted in the tree, which ensures that a contract cannot emit nullifiers that affect other contracts.
+
+```
+fn compute_siloed_nullifier(nullifier, contract):
+    return hash([contract, nullifier], OUTER_NULLIFIER)
+```
+
+Nullifiers are primarily used for privately marking notes as consumed. When a note is consumed in an application, the application computes and emits a deterministic nullifier associated with the note. If a user attempts to consume the same note more than once, the same nullifier will be generated, and will be rejected on insertion by the nullifier tree.
+
+Nullifiers provide privacy by being computed using a deterministic secret value, such as the owner's siloed nullifier secret key, or a random value stored in an encrypted note. This ensures that, without knowledge of the secret value, it is not possible to calculate the associated nullifier, and thus it is not possible to link a nullifier to its associated note commitment.
+
+Applications are not constrained by the protocol on how the nullifier for a note is computed. It is the responsibility of the application to guarantee determinism in calculating a nullifier, otherwise the same note could be spent multiple times.
+
+Furthermore, nullifiers can be emitted by an application just to ensure that an action can be executed only once, such as initializing a value, and are not required to be linked to a note commitment.
\ No newline at end of file
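The two pseudocode snippets above translate directly into code. A minimal TypeScript rendering, with SHA-256 standing in for the protocol's Pedersen hash and illustrative domain-separation constants in place of generator indices:

```typescript
// Sketch of commitment siloing/uniqueness and nullifier siloing. The hash
// below is a placeholder; the protocol uses Pedersen with generator indices.
import { createHash } from 'crypto';

type Field = bigint;

const SILOED_COMMITMENT = 1n; // illustrative domain-separation constants
const COMMITMENT_NONCE = 2n;
const UNIQUE_COMMITMENT = 3n;
const OUTER_NULLIFIER = 4n;

function hash(inputs: Field[], generatorIndex: Field): Field {
  const h = createHash('sha256');
  h.update(generatorIndex.toString());
  for (const x of inputs) h.update(':' + x.toString(16));
  return BigInt('0x' + h.digest('hex')) >> 2n; // truncate to ~254 bits
}

function computeUniqueSiloedCommitment(
  commitment: Field,
  contract: Field,
  txHash: Field,
  indexInTx: number, // index of the commitment within the transaction
): Field {
  const siloedCommitment = hash([contract, commitment], SILOED_COMMITMENT);
  const nonce = hash([txHash, BigInt(indexInTx)], COMMITMENT_NONCE);
  return hash([nonce, siloedCommitment], UNIQUE_COMMITMENT);
}

function computeSiloedNullifier(nullifier: Field, contract: Field): Field {
  return hash([contract, nullifier], OUTER_NULLIFIER);
}

console.log(computeUniqueSiloedCommitment(42n, 7n, 1234n, 0).toString(16));
console.log(computeSiloedNullifier(42n, 7n).toString(16));
```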
diff --git a/yellow-paper/docs/state/public_data_tree.md b/yellow-paper/docs/state/public_data_tree.md
new file mode 100644
index 00000000000..7af2769b5ec
--- /dev/null
+++ b/yellow-paper/docs/state/public_data_tree.md
@@ -0,0 +1,19 @@
+# Public Data Tree
+
+The Public Data tree is an [indexed Merkle tree](./tree_impls.md#indexed-merkle-trees) that stores public-state key-value data. Each item stored in the tree is a key-value pair, where both key and value are 254-bit altBN-254 scalar field elements. Items are sorted based on their key, so each indexed tree leaf contains a tuple with the key, the value, the next higher key, and the index in the tree for the next higher key. This tree is part of the global state, and is updated by the sequencer during the execution of public functions.
+
+The Public Data tree is implemented using an indexed Merkle tree instead of a sparse Merkle tree in order to reduce the tree height. A lower height means shorter membership proofs.
+
+Keys in the Public Data tree are [siloed](./tree_impls.md#siloing-leaves) using the contract address, to prevent a contract from overwriting public state for another contract.
+
+```
+fn compute_siloed_public_data_item(key, value, contract):
+    let siloed_key = hash([contract, key], PUBLIC_DATA_LEAF)
+    return [siloed_key, value]
+```
+
+When reading a key from the Public Data tree, the key may or may not be present. If the key is not present, a non-membership proof is produced, and the value is assumed to be zero. When a key is written to, either a new node is appended to the tree if the key was not present, or its value is overwritten if it was.
+
+Public functions can read from or write to the Public Data tree by emitting `contract_storage_read` and `contract_storage_update_requests` in the `PublicCircuitPublicInputs`. The Kernel circuit then siloes these requests per contract.
+
+Contracts can store arbitrary data at a given key, which is always stored as a single field element. Applications are responsible for interpreting this data. Should an application need to store data larger than a single field element, it is responsible for partitioning the data across multiple keys.
diff --git a/yellow-paper/docs/state/tree_impls.md b/yellow-paper/docs/state/tree_impls.md
new file mode 100644
index 00000000000..0a698eb2de4
--- /dev/null
+++ b/yellow-paper/docs/state/tree_impls.md
@@ -0,0 +1,25 @@
+# Tree implementations
+
+Aztec relies on two Merkle tree implementations in the protocol: append-only and indexed Merkle trees.
+
+## Append-only Merkle trees
+
+In an append-only Merkle tree, new leaves are inserted in order from left to right. Existing leaf values are immutable and cannot be modified. These trees are useful to represent historic data, as new entries are added as new transactions and blocks are processed, and historic data is not altered.
+
+Append-only trees allow for more efficient syncing than sparse trees, since clients can sync from left to right starting with their last known value. Updates to the tree root from new leaves can be computed just by keeping the rightmost boundary of the tree, and batch insertions can be computed with fewer hashes than in a sparse tree. Append-only trees also provide cheap historic snapshots, as older roots can be computed by completing the Merkle path from a past left subtree with an empty right subtree.
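A minimal sketch of the "rightmost boundary" (frontier) idea referenced above, assuming a fixed-depth binary tree and a placeholder hash:

```typescript
// Append-only Merkle tree that keeps only the rightmost frontier node per
// level, so each append costs O(DEPTH) hashes. SHA-256 is illustrative only.
import { createHash } from 'crypto';

const DEPTH = 4; // illustrative; real trees are much deeper
const node = (l: string, r: string) =>
  createHash('sha256').update(l + r).digest('hex');

// Precompute the hash of an empty subtree at each level.
const zeros: string[] = ['00'.repeat(32)];
for (let i = 1; i <= DEPTH; i++) zeros.push(node(zeros[i - 1], zeros[i - 1]));

const frontier: string[] = new Array(DEPTH).fill('');
let size = 0;

// Appends a leaf and returns the new root.
function append(leaf: string): string {
  let current = leaf;
  let index = size++;
  for (let level = 0; level < DEPTH; level++) {
    if (index % 2 === 0) {
      frontier[level] = current;             // left child: remember it
      current = node(current, zeros[level]); // sibling is an empty subtree
    } else {
      current = node(frontier[level], current); // right child: use stored left
    }
    index >>= 1;
  }
  return current;
}

console.log(append('aa'.repeat(32))); // root after the first leaf
```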
+
+## Indexed Merkle trees
+
+Indexed Merkle trees, introduced [here](https://eprint.iacr.org/2021/1263.pdf), allow for more efficient proofs of non-inclusion than sparse Merkle trees. Each leaf in the tree is a tuple with the leaf value, the next higher value in the tree, and the index of the leaf where that next higher value is stored. New nodes are inserted left to right, as in the append-only tree, but existing nodes can be modified to update the next value and its pointer. Indexed Merkle trees behave as a Merkle tree over a sorted linked list.
+
+Assuming the indexed Merkle tree invariants hold, proving non-membership of a value `x` then requires a membership proof of the node with a value lower than `x` and a next higher value greater than `x`. The cost of this proof is proportional to the height of the tree, which can be set according to the expected number of elements to be stored in the tree. For comparison, a non-membership proof in a sparse tree requires a tree with height proportional to the size of the elements, so when working with 256-bit elements, 256 hashes are required for a proof.
+
+Refer to [this page](https://docs.aztec.network/concepts/advanced/data_structures/indexed_merkle_tree) for more details on how insertions, updates, and membership proofs are executed on an Indexed Merkle tree.
+
+## Siloing leaves
+
+In several trees in the protocol we indicate that their leaves are "siloed". This refers to hashing the leaf value with a siloing value before inserting it in the tree. The siloing value is typically an identifier of the contract that produced the value. This allows us to store disjoint "domains" within the same tree, ensuring that a value emitted from one domain cannot affect others.
+
+To guarantee the siloing of leaf values, siloing is performed by a trusted protocol circuit, such as the kernel or rollup circuits, and not by an application circuit. Siloing is performed by Pedersen hashing the contract address and the value.
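The sorted-linked-list view of an indexed tree, and the "low leaf" lookup that drives both non-membership proofs and duplicate rejection, can be sketched without any hashing (data structures only; Merkle paths are omitted):

```typescript
// Simplified model of indexed Merkle tree leaves as a sorted linked list.
interface Leaf {
  value: bigint;
  nextValue: bigint; // 0n marks "no higher value"
}

// Start with the zero sentinel leaf, as in a freshly initialized tree.
const leaves: Leaf[] = [{ value: 0n, nextValue: 0n }];

// The "low leaf" is the leaf whose range (value, nextValue) covers x.
function lowLeaf(x: bigint): Leaf {
  return leaves
    .filter(l => l.value < x)
    .reduce((best, l) => (l.value > best.value ? l : best));
}

// Non-membership of x holds iff the low leaf's next value skips over x.
function isNonMember(x: bigint): boolean {
  const low = lowLeaf(x);
  return low.nextValue !== x && (low.nextValue > x || low.nextValue === 0n);
}

// Insertion rejects duplicates, then splices x into the linked list.
function insert(x: bigint): void {
  const low = lowLeaf(x);
  if (low.nextValue === x) throw new Error('duplicate value');
  leaves.push({ value: x, nextValue: low.nextValue });
  low.nextValue = x;
}

insert(5n);
insert(3n);
console.log(isNonMember(4n)); // true
console.log(isNonMember(5n)); // false
```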
diff --git a/yellow-paper/docs/transactions/tx-object.md b/yellow-paper/docs/transactions/tx-object.md
index ccfa2f7d4a7..95b736766c4 100644
--- a/yellow-paper/docs/transactions/tx-object.md
+++ b/yellow-paper/docs/transactions/tx-object.md
@@ -45,7 +45,7 @@ Output of the last iteration of the private kernel circuit. Includes _accumulate
 | nullifierTreeRoot | Field | Root of the nullifier tree at the time of when this information was assembled. |
 | contractTreeRoot | Field | Root of the contract tree at the time of when this information was assembled. |
 | l1ToL2MessagesTreeRoot | Field | Root of the L1 to L2 messages tree at the time of when this information was assembled. |
-| blocksTreeRoot | Field | Root of the historic blocks tree at the time of when this information was assembled. |
+| archiveRoot | Field | Root of the archive at the time of when this information was assembled. |
 | privateKernelVkTreeRoot | Field | Root of the private kernel VK tree at the time of when this information was assembled (future enhancement). |
 | publicDataTreeRoot | Field | Current public state tree hash. |
 | globalVariablesHash | Field | Previous globals hash, this value is used to recalculate the block hash. |
diff --git a/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js b/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js
index 578ac938aaf..6da9d78749c 100644
--- a/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js
+++ b/yellow-paper/src/preprocess/InstructionSet/InstructionSet.js
@@ -887,7 +887,7 @@ T[retOffset:retOffset+retSize] = field
         {"name": "blockNumOffset", "description": "memory offset of the block number input"},
         {"name": "dstOffset", "description": "memory offset specifying where to store operation's result"},
     ],
-    "Expression": "`M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].blocks_tree_root`",
+    "Expression": "`M[dstOffset] = HistoricalBlockData[M[blockNumOffset]].archive_root`",
    "Summary": "Get the historical blocks tree root as of the specified block number.",
    "Details": "",
    "Tag checks": "",