From 69c7e99bf53893685fe838763a53664b095fdabf Mon Sep 17 00:00:00 2001 From: Tom French <15848336+TomAFrench@users.noreply.github.com> Date: Thu, 11 Jan 2024 14:14:24 +0000 Subject: [PATCH] feat: sync with main noir repo (#3939) subrepo: subdir: "noir" merged: "2211cf0d0" upstream: origin: "https://github.com/noir-lang/noir" branch: "aztec-packages" commit: "3f6a24d07" git-subrepo: version: "0.4.6" origin: "https://github.com/ingydotnet/git-subrepo" commit: "110b9eb" --------- Co-authored-by: sirasistant --- noir/.dockerignore | 9 + noir/.github/scripts/acvm_js-build.sh | 5 + noir/.github/scripts/acvm_js-test-browser.sh | 5 + noir/.github/scripts/acvm_js-test.sh | 4 + .../scripts/backend-barretenberg-build.sh | 4 + .../scripts/backend-barretenberg-test.sh | 4 + noir/.github/scripts/install_wasm-bindgen.sh | 5 + noir/.github/scripts/integration-test.sh | 6 + noir/.github/scripts/nargo-build.sh | 8 + noir/.github/scripts/nargo-test.sh | 10 + noir/.github/scripts/noir-codegen-build.sh | 4 + noir/.github/scripts/noir-codegen-test.sh | 4 + noir/.github/scripts/noir-js-build.sh | 4 + noir/.github/scripts/noir-js-test.sh | 6 + noir/.github/scripts/noir-js-types-build.sh | 4 + noir/.github/scripts/noir-wasm-build.sh | 5 + .../.github/scripts/noir-wasm-test-browser.sh | 6 + noir/.github/scripts/noir-wasm-test.sh | 7 + noir/.github/scripts/noirc-abi-build.sh | 5 + .../.github/scripts/noirc-abi-test-browser.sh | 5 + noir/.github/scripts/noirc-abi-test.sh | 4 + noir/.github/workflows/docker-test-flow.yml | 733 ++++++++++++++++++ noir/.gitrepo | 2 +- noir/Cargo.lock | 1 + noir/Dockerfile.ci | 49 +- noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs | 79 -- noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs | 6 +- noir/acvm-repo/blackbox_solver/Cargo.toml | 1 + noir/acvm-repo/blackbox_solver/src/lib.rs | 82 ++ .../src/brillig/brillig_gen/brillig_block.rs | 59 +- .../noirc_evaluator/src/brillig/brillig_ir.rs | 55 +- .../src/brillig/brillig_ir/debug_show.rs | 4 +- .../noirc_evaluator/src/ssa/ir/dfg.rs | 30 +- .../noirc_evaluator/src/ssa/ir/instruction.rs | 247 ++++-- .../src/ssa/opt/constant_folding.rs | 176 ++++- .../noirc_frontend/src/hir/def_map/mod.rs | 1 + noir/cspell.json | 1 + noir/docs/src/pages/index.jsx | 2 +- noir/noir_stdlib/src/lib.nr | 1 + noir/noir_stdlib/src/prelude.nr | 1 + noir/noir_stdlib/src/uint128.nr | 292 +++++++ .../nargo_compile_noir_codegen_assert_lt.sh | 4 - .../reexports/Nargo.toml | 7 + .../reexports/src/main.nr | 8 + .../execution_success/u128/Nargo.toml | 6 + .../execution_success/u128/Prover.toml | 7 + .../execution_success/u128/src/main.nr | 44 ++ .../test_libraries/exporting_lib/Nargo.toml | 6 + .../test_libraries/exporting_lib/src/lib.nr | 10 + .../test_libraries/reexporting_lib/Nargo.toml | 7 + .../test_libraries/reexporting_lib/src/lib.nr | 3 + .../noir_codegen/test/test_lib/Nargo.toml | 1 + .../src/private_kernel_ordering.nr | 2 +- .../crates/public-kernel-lib/src/common.nr | 10 +- .../base_or_merge_rollup_public_inputs.nr | 4 +- .../src/crates/rollup-lib/src/components.nr | 10 +- .../src/merge/merge_rollup_inputs.nr | 10 +- .../src/crates/rollup-lib/src/root.nr | 10 +- .../src/crates/types/src/hash.nr | 14 +- .../src/crates/types/src/utils/uint128.nr | 8 +- 60 files changed, 1792 insertions(+), 315 deletions(-) create mode 100755 noir/.github/scripts/acvm_js-build.sh create mode 100755 noir/.github/scripts/acvm_js-test-browser.sh create mode 100755 noir/.github/scripts/acvm_js-test.sh create mode 100755 noir/.github/scripts/backend-barretenberg-build.sh create mode 100755 
noir/.github/scripts/backend-barretenberg-test.sh create mode 100755 noir/.github/scripts/install_wasm-bindgen.sh create mode 100755 noir/.github/scripts/integration-test.sh create mode 100755 noir/.github/scripts/nargo-build.sh create mode 100755 noir/.github/scripts/nargo-test.sh create mode 100755 noir/.github/scripts/noir-codegen-build.sh create mode 100755 noir/.github/scripts/noir-codegen-test.sh create mode 100755 noir/.github/scripts/noir-js-build.sh create mode 100755 noir/.github/scripts/noir-js-test.sh create mode 100755 noir/.github/scripts/noir-js-types-build.sh create mode 100755 noir/.github/scripts/noir-wasm-build.sh create mode 100755 noir/.github/scripts/noir-wasm-test-browser.sh create mode 100755 noir/.github/scripts/noir-wasm-test.sh create mode 100755 noir/.github/scripts/noirc-abi-build.sh create mode 100755 noir/.github/scripts/noirc-abi-test-browser.sh create mode 100755 noir/.github/scripts/noirc-abi-test.sh create mode 100644 noir/.github/workflows/docker-test-flow.yml create mode 100644 noir/noir_stdlib/src/uint128.nr delete mode 100755 noir/scripts/nargo_compile_noir_codegen_assert_lt.sh create mode 100644 noir/test_programs/compile_success_empty/reexports/Nargo.toml create mode 100644 noir/test_programs/compile_success_empty/reexports/src/main.nr create mode 100644 noir/test_programs/execution_success/u128/Nargo.toml create mode 100644 noir/test_programs/execution_success/u128/Prover.toml create mode 100644 noir/test_programs/execution_success/u128/src/main.nr create mode 100644 noir/test_programs/test_libraries/exporting_lib/Nargo.toml create mode 100644 noir/test_programs/test_libraries/exporting_lib/src/lib.nr create mode 100644 noir/test_programs/test_libraries/reexporting_lib/Nargo.toml create mode 100644 noir/test_programs/test_libraries/reexporting_lib/src/lib.nr diff --git a/noir/.dockerignore b/noir/.dockerignore index 8bea0aeeb1a..559b271bf38 100644 --- a/noir/.dockerignore +++ b/noir/.dockerignore @@ -1,6 +1,15 @@ Dockerfile* .dockerignore +# Yarn +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/sdks +!.yarn/versions + packages **/package.tgz **/target diff --git a/noir/.github/scripts/acvm_js-build.sh b/noir/.github/scripts/acvm_js-build.sh new file mode 100755 index 00000000000..0565a9bb89f --- /dev/null +++ b/noir/.github/scripts/acvm_js-build.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +.github/scripts/install_wasm-bindgen.sh +yarn workspace @noir-lang/acvm_js build diff --git a/noir/.github/scripts/acvm_js-test-browser.sh b/noir/.github/scripts/acvm_js-test-browser.sh new file mode 100755 index 00000000000..598c98dadf2 --- /dev/null +++ b/noir/.github/scripts/acvm_js-test-browser.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/acvm_js test:browser diff --git a/noir/.github/scripts/acvm_js-test.sh b/noir/.github/scripts/acvm_js-test.sh new file mode 100755 index 00000000000..d5519d26cc4 --- /dev/null +++ b/noir/.github/scripts/acvm_js-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/acvm_js test diff --git a/noir/.github/scripts/backend-barretenberg-build.sh b/noir/.github/scripts/backend-barretenberg-build.sh new file mode 100755 index 00000000000..d90995397d8 --- /dev/null +++ b/noir/.github/scripts/backend-barretenberg-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/backend_barretenberg build diff --git a/noir/.github/scripts/backend-barretenberg-test.sh 
b/noir/.github/scripts/backend-barretenberg-test.sh new file mode 100755 index 00000000000..1bd6f8e410d --- /dev/null +++ b/noir/.github/scripts/backend-barretenberg-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/backend_barretenberg test diff --git a/noir/.github/scripts/install_wasm-bindgen.sh b/noir/.github/scripts/install_wasm-bindgen.sh new file mode 100755 index 00000000000..b8c41393ab0 --- /dev/null +++ b/noir/.github/scripts/install_wasm-bindgen.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash +cargo-binstall wasm-bindgen-cli --version 0.2.86 -y diff --git a/noir/.github/scripts/integration-test.sh b/noir/.github/scripts/integration-test.sh new file mode 100755 index 00000000000..4e1b52cedf9 --- /dev/null +++ b/noir/.github/scripts/integration-test.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -eu + +apt-get install libc++-dev -y +npx playwright install && npx playwright install-deps +yarn workspace integration-tests test \ No newline at end of file diff --git a/noir/.github/scripts/nargo-build.sh b/noir/.github/scripts/nargo-build.sh new file mode 100755 index 00000000000..2115732ab7e --- /dev/null +++ b/noir/.github/scripts/nargo-build.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -eu + +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +export GIT_COMMIT=$(git rev-parse --verify HEAD) + +cargo build --release diff --git a/noir/.github/scripts/nargo-test.sh b/noir/.github/scripts/nargo-test.sh new file mode 100755 index 00000000000..9234df7bf5c --- /dev/null +++ b/noir/.github/scripts/nargo-test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -eu + +apt-get install -y curl libc++-dev + +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +export GIT_COMMIT=$(git rev-parse --verify HEAD) + +cargo test --workspace --locked --release \ No newline at end of file diff --git a/noir/.github/scripts/noir-codegen-build.sh b/noir/.github/scripts/noir-codegen-build.sh new file mode 100755 index 00000000000..d42be4d676e --- /dev/null +++ b/noir/.github/scripts/noir-codegen-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noir_codegen build diff --git a/noir/.github/scripts/noir-codegen-test.sh b/noir/.github/scripts/noir-codegen-test.sh new file mode 100755 index 00000000000..6f603f65507 --- /dev/null +++ b/noir/.github/scripts/noir-codegen-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noir_codegen test \ No newline at end of file diff --git a/noir/.github/scripts/noir-js-build.sh b/noir/.github/scripts/noir-js-build.sh new file mode 100755 index 00000000000..04367e41342 --- /dev/null +++ b/noir/.github/scripts/noir-js-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noir_js build diff --git a/noir/.github/scripts/noir-js-test.sh b/noir/.github/scripts/noir-js-test.sh new file mode 100755 index 00000000000..b5fe34038fe --- /dev/null +++ b/noir/.github/scripts/noir-js-test.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -eu + +./scripts/nargo_compile_noir_js_assert_lt.sh +rm -rf /usr/src/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/debug_assert_lt.json +yarn workspace @noir-lang/noir_js test \ No newline at end of file diff --git a/noir/.github/scripts/noir-js-types-build.sh b/noir/.github/scripts/noir-js-types-build.sh new file mode 100755 index 00000000000..77b08651d68 --- /dev/null +++ b/noir/.github/scripts/noir-js-types-build.sh @@ -0,0 
+1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/types build \ No newline at end of file diff --git a/noir/.github/scripts/noir-wasm-build.sh b/noir/.github/scripts/noir-wasm-build.sh new file mode 100755 index 00000000000..4523751612d --- /dev/null +++ b/noir/.github/scripts/noir-wasm-build.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +.github/scripts/install_wasm-bindgen.sh +yarn workspace @noir-lang/noir_wasm build \ No newline at end of file diff --git a/noir/.github/scripts/noir-wasm-test-browser.sh b/noir/.github/scripts/noir-wasm-test-browser.sh new file mode 100755 index 00000000000..4b584abce23 --- /dev/null +++ b/noir/.github/scripts/noir-wasm-test-browser.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -eu + +./scripts/nargo_compile_wasm_fixtures.sh +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/noir_wasm test:browser \ No newline at end of file diff --git a/noir/.github/scripts/noir-wasm-test.sh b/noir/.github/scripts/noir-wasm-test.sh new file mode 100755 index 00000000000..03e1bac2330 --- /dev/null +++ b/noir/.github/scripts/noir-wasm-test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -eu + +./scripts/nargo_compile_wasm_fixtures.sh +yarn workspace @noir-lang/noir_wasm test:node +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/noir_wasm test:browser diff --git a/noir/.github/scripts/noirc-abi-build.sh b/noir/.github/scripts/noirc-abi-build.sh new file mode 100755 index 00000000000..d5da6deaa0f --- /dev/null +++ b/noir/.github/scripts/noirc-abi-build.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +.github/scripts/install_wasm-bindgen.sh +yarn workspace @noir-lang/noirc_abi build diff --git a/noir/.github/scripts/noirc-abi-test-browser.sh b/noir/.github/scripts/noirc-abi-test-browser.sh new file mode 100755 index 00000000000..7a966cb5e94 --- /dev/null +++ b/noir/.github/scripts/noirc-abi-test-browser.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/noirc_abi test:browser diff --git a/noir/.github/scripts/noirc-abi-test.sh b/noir/.github/scripts/noirc-abi-test.sh new file mode 100755 index 00000000000..39ca0a44b07 --- /dev/null +++ b/noir/.github/scripts/noirc-abi-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noirc_abi test diff --git a/noir/.github/workflows/docker-test-flow.yml b/noir/.github/workflows/docker-test-flow.yml new file mode 100644 index 00000000000..0277812f5ae --- /dev/null +++ b/noir/.github/workflows/docker-test-flow.yml @@ -0,0 +1,733 @@ +name: Test Nargo and JS packages + +on: + push: + branches: + - 'master' + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-base-nargo: + name: Build base nargo docker image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Get current date + id: date + run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE + - name: prepare docker images tags + id: prep + run: | + REGISTRY="ghcr.io" + IMG_RAW="${REGISTRY}/${{ github.repository }}" + IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]') + TAGS="${IMAGE}:${{ github.sha }}-nargo" + FULL_TAGS="${TAGS},${IMAGE}:latest-nargo,${IMAGE}:v${{ steps.date.outputs.date }}-nargo" + echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT + echo "image=$IMAGE" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + 
uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build nargo base dockerfile + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile.ci + tags: ${{ steps.prep.outputs.tags }} + target: base-nargo + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + + build-base-js: + name: Build base js docker image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Get current date + id: date + run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE + - name: Prepare docker image tags + id: prep + run: | + REGISTRY="ghcr.io" + IMG_RAW="${REGISTRY}/${{ github.repository }}" + IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]') + TAGS="${IMAGE}:${{ github.sha }}-js" + FULL_TAGS="${TAGS},${IMAGE}:latest-js,${IMAGE}:v${{ steps.date.outputs.date }}-js" + echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT + echo "image=$IMAGE" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build js base dockerfile + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile.ci + tags: ${{ steps.prep.outputs.tags }} + target: base-js + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + + artifact-nargo: + name: Artifact nargo + runs-on: ubuntu-latest + needs: [build-base-nargo] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Artifact nargo + uses: actions/upload-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release/nargo + if-no-files-found: error + compression-level: 0 + + test-nargo: + name: Test nargo + runs-on: ubuntu-latest + needs: [build-base-nargo] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Test + working-directory: /usr/src/noir + run: | + .github/scripts/nargo-test.sh + + build-noir-wasm: + name: Build noir wasm + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm/outputs/out/noir_wasm + retention-days: 10 + + test-noir-wasm: + name: Test noir wasm + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noir-wasm] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir_wasm artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-test.sh + + test-noir-wasm-browser: + 
name: Test noir wasm browser + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noir-wasm] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir_wasm artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-test-browser.sh + + build-acvm_js: + name: Build acvm js + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: acvm_js + path: + /usr/src/noir/acvm-repo/acvm_js/outputs/out/acvm_js + if-no-files-found: error + compression-level: 0 + + test-acvm_js: + name: Test acvm js + runs-on: ubuntu-latest + needs: [build-base-js, build-acvm_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-test.sh + + test-acvm_js-browser: + name: Test acvm js browser + runs-on: ubuntu-latest + needs: [build-base-js, build-acvm_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-test-browser.sh + + build-noirc-abi: + name: Build noirc abi + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noirc_abi_wasm + path: + /usr/src/noir/tooling/noirc_abi_wasm/outputs/out/noirc_abi_wasm + if-no-files-found: error + compression-level: 0 + + test-noirc-abi: + name: Test noirc abi + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-test.sh + + test-noirc-abi-browser: + name: Test noirc abi browser + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + 
credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-test-browser.sh + + build-noir-js-types: + name: Build noir js types + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-types-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + if-no-files-found: error + compression-level: 0 + + build-barretenberg-backend: + name: Build Barretenberg backend + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-noir-js-types] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: /usr/src/noir/tooling/noir_js_types/lib/ + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/backend-barretenberg-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + if-no-files-found: error + compression-level: 0 + + test-barretenberg-backend: + name: Test Barretenberg backend + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-noir-js-types, build-barretenberg-backend] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download artifact + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: /usr/src/noir/tooling/noir_js_types/lib/ + - name: Download Backend barretenberg + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/backend-barretenberg-test.sh + + build-noir_js: + name: Build noirjs + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-barretenberg-backend, build-noir-js-types] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + 
name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + + test-noir_js: + name: Test noirjs + runs-on: ubuntu-latest + needs: [ + build-base-js, + build-noirc-abi, + artifact-nargo, + build-acvm_js, + build-barretenberg-backend, + build-noir_js, + build-noir-js-types + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-test.sh + + build-noir_codegen: + name: Build noir codegen + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi package + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-codegen-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_codegen + path: + /usr/src/noir/tooling/noir_codegen/lib + + test-noir_codegen: + name: Test 
noir codegen + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js, build-noir_codegen] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Download noir codegen + uses: actions/download-artifact@v4 + with: + name: noir_codegen + path: + /usr/src/noir/tooling/noir_codegen/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-codegen-test.sh + + test-integration: + name: Integration test + runs-on: ubuntu-latest + needs: [ + build-base-js, + artifact-nargo, + build-noir-wasm, + build-noirc-abi, + build-acvm_js, + build-noir-js-types, + build-noir_js, + build-barretenberg-backend + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir wasm + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/integration-test.sh + + tests-end: + name: End + runs-on: ubuntu-latest + if: ${{ always() }} + needs: + - test-nargo + - test-noirc-abi + - test-noirc-abi-browser + - test-noir-wasm + - test-noir-wasm-browser + - test-integration + - test-noir_codegen + - test-acvm_js + - test-acvm_js-browser + - test-barretenberg-backend + - test-noir_js + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }} diff --git a/noir/.gitrepo b/noir/.gitrepo index 053acae1f4f..3a8edaae673 100644 --- a/noir/.gitrepo 
+++ b/noir/.gitrepo @@ -6,7 +6,7 @@ [subrepo] remote = https://github.com/noir-lang/noir branch = aztec-packages - commit = 114aa6f90f147fe69ff424e881463a65df26c4e6 + commit = 3f6a24d07f7081932926191ef4549b51e0036b5a parent = 9a80008c623a9d26e1b82c9e86561c304ef185f1 method = merge cmdver = 0.4.6 diff --git a/noir/Cargo.lock b/noir/Cargo.lock index 50892d98ff3..8aca450c6cd 100644 --- a/noir/Cargo.lock +++ b/noir/Cargo.lock @@ -60,6 +60,7 @@ dependencies = [ "blake2", "blake3", "k256", + "keccak", "p256", "sha2", "sha3", diff --git a/noir/Dockerfile.ci b/noir/Dockerfile.ci index 9ca995fd94f..57dcbe9cfee 100644 --- a/noir/Dockerfile.ci +++ b/noir/Dockerfile.ci @@ -1,40 +1,31 @@ -FROM rust:1-slim-bookworm as test-base +FROM rust:1.71.1-slim-bookworm as base RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y WORKDIR /usr/src/noir -COPY . . -RUN ./scripts/bootstrap_native.sh -ENV PATH="${PATH}:/usr/src/noir/target/release/" +ENV PATH="${PATH}:/usr/src/noir/target/release" -FROM test-base as test-cargo -RUN apt-get install -y curl libc++-dev -RUN ./scripts/test_native.sh +FROM base as base-nargo +COPY . . +RUN .github/scripts/nargo-build.sh -FROM test-base as test-js -RUN apt-get install pkg-config libssl-dev -y -RUN ./scripts/install_wasm-bindgen.sh +FROM base as base-js RUN apt-get install -y ca-certificates curl gnupg RUN mkdir -p /etc/apt/keyrings RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list RUN apt-get update && apt-get install nodejs -y RUN corepack enable -RUN yarn --immutable RUN apt-get install -y jq -RUN yarn build -RUN yarn workspace @noir-lang/acvm_js test -RUN npx playwright install && npx playwright install-deps -RUN yarn workspace @noir-lang/acvm_js test:browser -RUN yarn workspace @noir-lang/noirc_abi test -RUN yarn workspace @noir-lang/noirc_abi test:browser -RUN yarn workspace @noir-lang/backend_barretenberg test -RUN ./scripts/nargo_compile_noir_js_assert_lt.sh -RUN rm -rf /usr/src/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/debug_assert_lt.json -RUN yarn workspace @noir-lang/noir_js test -RUN ./scripts/nargo_compile_wasm_fixtures.sh -RUN yarn workspace @noir-lang/noir_wasm test:node -RUN yarn workspace @noir-lang/noir_wasm test:browser -RUN ./scripts/nargo_compile_noir_codegen_assert_lt.sh -RUN rm -rf /usr/src/noir/tooling/noir_codegen/test/assert_lt/target/debug_assert_lt.json -RUN yarn workspace @noir-lang/noir_codegen test -RUN apt-get install -y libc++-dev -RUN yarn test:integration +COPY yarn.lock package.json .yarnrc.yml ./ +COPY .yarn/ ./.yarn/ +COPY ./acvm-repo/acvm_js/package.json ./acvm-repo/acvm_js/ +COPY ./tooling/noirc_abi_wasm/package.json ./tooling/noirc_abi_wasm/ +COPY ./compiler/wasm/package.json ./compiler/wasm/ +COPY ./tooling/noir_js_types/package.json ./tooling/noir_js_types/ +COPY ./tooling/noir_js_backend_barretenberg/package.json ./tooling/noir_js_backend_barretenberg/ +COPY ./tooling/noir_js/package.json ./tooling/noir_js/ +COPY ./tooling/noir_codegen/package.json ./tooling/noir_codegen/ +COPY ./compiler/integration-tests/package.json ./compiler/integration-tests/ +COPY ./release-tests/package.json ./release-tests/ +COPY ./docs/package.json ./docs/ +RUN yarn --immutable +COPY . . 
diff --git a/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs index bbf7dd43bd9..1ada397fc59 100644 --- a/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/noir/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -86,82 +86,3 @@ fn write_digest_to_outputs( Ok(()) } - -const ROUNDS: usize = 24; - -const RC: [u64; ROUNDS] = [ - 1u64, - 0x8082u64, - 0x800000000000808au64, - 0x8000000080008000u64, - 0x808bu64, - 0x80000001u64, - 0x8000000080008081u64, - 0x8000000000008009u64, - 0x8au64, - 0x88u64, - 0x80008009u64, - 0x8000000au64, - 0x8000808bu64, - 0x800000000000008bu64, - 0x8000000000008089u64, - 0x8000000000008003u64, - 0x8000000000008002u64, - 0x8000000000000080u64, - 0x800au64, - 0x800000008000000au64, - 0x8000000080008081u64, - 0x8000000000008080u64, - 0x80000001u64, - 0x8000000080008008u64, -]; - -const RHO: [u32; 24] = - [1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44]; - -const PI: [usize; 24] = - [10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1]; - -const KECCAK_LANES: usize = 25; - -pub(crate) fn keccakf1600(state: &mut [u64; KECCAK_LANES]) { - for rc in RC { - let mut array: [u64; 5] = [0; 5]; - - // Theta - for x in 0..5 { - for y_count in 0..5 { - let y = y_count * 5; - array[x] ^= state[x + y]; - } - } - - for x in 0..5 { - for y_count in 0..5 { - let y = y_count * 5; - state[y + x] ^= array[(x + 4) % 5] ^ array[(x + 1) % 5].rotate_left(1); - } - } - - // Rho and pi - let mut last = state[1]; - for x in 0..24 { - array[0] = state[PI[x]]; - state[PI[x]] = last.rotate_left(RHO[x]); - last = array[0]; - } - - // Chi - for y_step in 0..5 { - let y = y_step * 5; - array[..5].copy_from_slice(&state[y..(5 + y)]); - - for x in 0..5 { - state[y + x] = array[x] ^ ((!array[(x + 1) % 5]) & (array[(x + 2) % 5])); - } - } - - // Iota - state[0] ^= rc; - } -} diff --git a/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs index eb16c984b00..ca355b6045d 100644 --- a/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/noir/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -3,9 +3,9 @@ use acir::{ native_types::{Witness, WitnessMap}, FieldElement, }; -use acvm_blackbox_solver::{blake2s, blake3, keccak256, sha256}; +use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; -use self::{hash::keccakf1600, pedersen::pedersen_hash}; +use self::pedersen::pedersen_hash; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; @@ -119,7 +119,7 @@ pub(crate) fn solve( let lane = witness_assignment.try_to_u64(); state[i] = lane.unwrap(); } - keccakf1600(&mut state); + let state = keccakf1600(state)?; for (output_witness, value) in outputs.iter().zip(state.into_iter()) { insert_value(output_witness, FieldElement::from(value as u128), initial_witness)?; } diff --git a/noir/acvm-repo/blackbox_solver/Cargo.toml b/noir/acvm-repo/blackbox_solver/Cargo.toml index 258321d8ef4..749ef8f289a 100644 --- a/noir/acvm-repo/blackbox_solver/Cargo.toml +++ b/noir/acvm-repo/blackbox_solver/Cargo.toml @@ -20,6 +20,7 @@ blake2 = "0.10.6" blake3 = "1.5.0" sha2 = "0.10.6" sha3 = "0.10.6" +keccak = "0.1.4" k256 = { version = "0.11.0", features = [ "ecdsa", "ecdsa-core", diff --git a/noir/acvm-repo/blackbox_solver/src/lib.rs b/noir/acvm-repo/blackbox_solver/src/lib.rs index 6458f4e6f64..e11b9316fdd 100644 --- a/noir/acvm-repo/blackbox_solver/src/lib.rs +++ b/noir/acvm-repo/blackbox_solver/src/lib.rs 
@@ -80,6 +80,15 @@ pub fn keccak256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Keccak256, err)) } +const KECCAK_LANES: usize = 25; + +pub fn keccakf1600( + mut state: [u64; KECCAK_LANES], +) -> Result<[u64; KECCAK_LANES], BlackBoxResolutionError> { + keccak::f1600(&mut state); + Ok(state) +} + pub fn ecdsa_secp256k1_verify( hashed_msg: &[u8], public_key_x: &[u8; 32], @@ -241,6 +250,79 @@ fn verify_secp256r1_ecdsa_signature( } } +#[cfg(test)] +mod keccakf1600_tests { + use crate::keccakf1600; + + #[test] + fn sanity_check() { + // Test vectors are copied from XKCP (eXtended Keccak Code Package) + // https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KeccakF-1600-IntermediateValues.txt + let zero_state = [0u64; 25]; + + let expected_state_first = [ + 0xF1258F7940E1DDE7, + 0x84D5CCF933C0478A, + 0xD598261EA65AA9EE, + 0xBD1547306F80494D, + 0x8B284E056253D057, + 0xFF97A42D7F8E6FD4, + 0x90FEE5A0A44647C4, + 0x8C5BDA0CD6192E76, + 0xAD30A6F71B19059C, + 0x30935AB7D08FFC64, + 0xEB5AA93F2317D635, + 0xA9A6E6260D712103, + 0x81A57C16DBCF555F, + 0x43B831CD0347C826, + 0x01F22F1A11A5569F, + 0x05E5635A21D9AE61, + 0x64BEFEF28CC970F2, + 0x613670957BC46611, + 0xB87C5A554FD00ECB, + 0x8C3EE88A1CCF32C8, + 0x940C7922AE3A2614, + 0x1841F924A2C509E4, + 0x16F53526E70465C2, + 0x75F644E97F30A13B, + 0xEAF1FF7B5CECA249, + ]; + let expected_state_second = [ + 0x2D5C954DF96ECB3C, + 0x6A332CD07057B56D, + 0x093D8D1270D76B6C, + 0x8A20D9B25569D094, + 0x4F9C4F99E5E7F156, + 0xF957B9A2DA65FB38, + 0x85773DAE1275AF0D, + 0xFAF4F247C3D810F7, + 0x1F1B9EE6F79A8759, + 0xE4FECC0FEE98B425, + 0x68CE61B6B9CE68A1, + 0xDEEA66C4BA8F974F, + 0x33C43D836EAFB1F5, + 0xE00654042719DBD9, + 0x7CF8A9F009831265, + 0xFD5449A6BF174743, + 0x97DDAD33D8994B40, + 0x48EAD5FC5D0BE774, + 0xE3B8C8EE55B7B03C, + 0x91A0226E649E42E9, + 0x900E3129E7BADD7B, + 0x202A9EC5FAA3CCE8, + 0x5B3402464E1C3DB6, + 0x609F4E62A44C1059, + 0x20D06CD26A8FBF5C, + ]; + + let state_first = keccakf1600(zero_state).unwrap(); + let state_second = keccakf1600(state_first).unwrap(); + + assert_eq!(state_first, expected_state_first); + assert_eq!(state_second, expected_state_second); + } +} + #[cfg(test)] mod secp256k1_tests { use super::verify_secp256k1_ecdsa_signature; diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 0e06a36fd94..db005d9d438 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -521,7 +521,7 @@ impl<'block> BrilligBlock<'block> { unreachable!("unsupported function call type {:?}", dfg[*func]) } }, - Instruction::Truncate { value, .. } => { + Instruction::Truncate { value, bit_size, .. 
} => { let result_ids = dfg.instruction_results(instruction_id); let destination_register = self.variables.define_register_variable( self.function_context, @@ -530,9 +530,13 @@ impl<'block> BrilligBlock<'block> { dfg, ); let source_register = self.convert_ssa_register_value(*value, dfg); - self.brillig_context.truncate_instruction(destination_register, source_register); + self.brillig_context.truncate_instruction( + destination_register, + source_register, + *bit_size, + ); } - Instruction::Cast(value, target_type) => { + Instruction::Cast(value, _) => { let result_ids = dfg.instruction_results(instruction_id); let destination_register = self.variables.define_register_variable( self.function_context, @@ -541,12 +545,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); let source_register = self.convert_ssa_register_value(*value, dfg); - self.convert_cast( - destination_register, - source_register, - target_type, - &dfg.type_of_value(*value), - ); + self.convert_cast(destination_register, source_register); } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); @@ -1092,43 +1091,11 @@ impl<'block> BrilligBlock<'block> { /// Converts an SSA cast to a sequence of Brillig opcodes. /// Casting is only necessary when shrinking the bit size of a numeric value. - fn convert_cast( - &mut self, - destination: RegisterIndex, - source: RegisterIndex, - target_type: &Type, - source_type: &Type, - ) { - fn numeric_to_bit_size(typ: &NumericType) -> u32 { - match typ { - NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => *bit_size, - NumericType::NativeField => FieldElement::max_num_bits(), - } - } - // Casting is only valid for numeric types - // This should be checked by the frontend, so we panic if this is the case - let (source_numeric_type, target_numeric_type) = match (source_type, target_type) { - (Type::Numeric(source_numeric_type), Type::Numeric(target_numeric_type)) => { - (source_numeric_type, target_numeric_type) - } - _ => unimplemented!("The cast operation is only valid for integers."), - }; - let source_bit_size = numeric_to_bit_size(source_numeric_type); - let target_bit_size = numeric_to_bit_size(target_numeric_type); - // Casting from a larger bit size to a smaller bit size (narrowing cast) - // requires a cast instruction. - // If its a widening cast, ie casting from a smaller bit size to a larger bit size - // we simply put a mov instruction as a no-op - // - // Field elements by construction always have the largest bit size - // This means that casting to a Field element, will always be a widening cast - // and therefore a no-op. Conversely, casting from a Field element - // will always be a narrowing cast and therefore a cast instruction - if source_bit_size > target_bit_size { - self.brillig_context.cast_instruction(destination, source, target_bit_size); - } else { - self.brillig_context.mov_instruction(destination, source); - } + fn convert_cast(&mut self, destination: RegisterIndex, source: RegisterIndex) { + // We assume that `source` is a valid `target_type` as it's expected that a truncate instruction was emitted + // to ensure this is the case. + + self.brillig_context.mov_instruction(destination, source); } /// Converts the Binary instruction into a sequence of Brillig opcodes. 
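For reference, the `keccakf1600` helper added to `acvm-repo/blackbox_solver` above delegates to the `keccak` crate (the new dependency in its Cargo.toml) instead of the hand-rolled permutation removed from `pwg/blackbox/hash.rs`. A minimal standalone sketch of that call, checked against the first XKCP intermediate value quoted in the new `sanity_check` test; the `main` wrapper is illustrative only and not part of the patch:

// Sketch: the keccak-f[1600] permutation over an all-zero 25-lane state,
// mirroring what acvm_blackbox_solver::keccakf1600 now does internally.
fn main() {
    let mut state = [0u64; 25];
    keccak::f1600(&mut state);
    assert_eq!(state[0], 0xF1258F7940E1DDE7);
}
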
diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index ff182aaa7d2..3c4e77b09ec 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -687,10 +687,29 @@ impl BrilligContext { &mut self, destination_of_truncated_value: RegisterIndex, value_to_truncate: RegisterIndex, + bit_size: u32, ) { - // Effectively a no-op because brillig already has implicit truncation on integer - // operations. We need only copy the value to it's destination. - self.mov_instruction(destination_of_truncated_value, value_to_truncate); + self.debug_show.truncate_instruction( + destination_of_truncated_value, + value_to_truncate, + bit_size, + ); + assert!( + bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, + "tried to truncate to a bit size greater than allowed {bit_size}" + ); + + // The brillig VM performs all arithmetic operations modulo 2**bit_size + // So to truncate any value to a target bit size we can just issue a no-op arithmetic operation + // With bit size equal to target_bit_size + let zero_register = self.make_constant(Value::from(FieldElement::zero())); + self.binary_instruction( + value_to_truncate, + zero_register, + destination_of_truncated_value, + BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size }, + ); + self.deallocate_register(zero_register); } /// Emits a stop instruction @@ -761,36 +780,6 @@ impl BrilligContext { self.deallocate_register(scratch_register_j); } - /// Emits a modulo instruction against 2**target_bit_size - /// - /// Integer arithmetic in Brillig is currently constrained to 127 bit integers. - /// We restrict the cast operation, so that integer types over 127 bits - /// cannot be created. - pub(crate) fn cast_instruction( - &mut self, - destination: RegisterIndex, - source: RegisterIndex, - target_bit_size: u32, - ) { - self.debug_show.cast_instruction(destination, source, target_bit_size); - assert!( - target_bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, - "tried to cast to a bit size greater than allowed {target_bit_size}" - ); - - // The brillig VM performs all arithmetic operations modulo 2**bit_size - // So to cast any value to a target bit size we can just issue a no-op arithmetic operation - // With bit size equal to target_bit_size - let zero_register = self.make_constant(Value::from(FieldElement::zero())); - self.binary_instruction( - source, - zero_register, - destination, - BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size: target_bit_size }, - ); - self.deallocate_register(zero_register); - } - /// Adds a unresolved external `Call` instruction to the bytecode. /// This calls into another function compiled into this brillig artifact. 
pub(crate) fn add_external_call_instruction(&mut self, func_label: T) { diff --git a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index 6add4c97e32..66c6b3b0249 100644 --- a/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/noir/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -326,7 +326,7 @@ impl DebugShow { } /// Debug function for cast_instruction - pub(crate) fn cast_instruction( + pub(crate) fn truncate_instruction( &self, destination: RegisterIndex, source: RegisterIndex, @@ -334,7 +334,7 @@ impl DebugShow { ) { debug_println!( self.enable_debug_trace, - " CAST {} FROM {} TO {} BITS", + " TRUNCATE {} FROM {} TO {} BITS", destination, source, target_bit_size diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/dfg.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index 931aee9d079..98c0253c6ef 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -166,11 +166,31 @@ impl DataFlowGraph { SimplifiedToMultiple(simplification) } SimplifyResult::Remove => InstructionRemoved, - result @ (SimplifyResult::SimplifiedToInstruction(_) | SimplifyResult::None) => { - let instruction = result.instruction().unwrap_or(instruction); - let id = self.make_instruction(instruction, ctrl_typevars); - self.blocks[block].insert_instruction(id); - self.locations.insert(id, call_stack); + result @ (SimplifyResult::SimplifiedToInstruction(_) + | SimplifyResult::SimplifiedToInstructionMultiple(_) + | SimplifyResult::None) => { + let instructions = result.instructions().unwrap_or(vec![instruction]); + + if instructions.len() > 1 { + // There's currently no way to pass results from one instruction in `instructions` on to the next. + // We then restrict this to only support multiple instructions if they're all `Instruction::Constrain` + // as this instruction type does not have any results. + assert!( + instructions.iter().all(|instruction| matches!(instruction, Instruction::Constrain(..))), + "`SimplifyResult::SimplifiedToInstructionMultiple` only supports `Constrain` instructions" + ); + } + + let mut last_id = None; + + for instruction in instructions { + let id = self.make_instruction(instruction, ctrl_typevars.clone()); + self.blocks[block].insert_instruction(id); + self.locations.insert(id, call_stack.clone()); + last_id = Some(id); + } + + let id = last_id.expect("There should be at least 1 simplified instruction"); InsertInstructionResult::Results(id, self.instruction_results(id)) } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 9691017f04b..afd182ab2dd 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -432,73 +432,11 @@ impl Instruction { } } Instruction::Constrain(lhs, rhs, msg) => { - if dfg.resolve(*lhs) == dfg.resolve(*rhs) { - // Remove trivial case `assert_eq(x, x)` - SimplifyResult::Remove + let constraints = decompose_constrain(*lhs, *rhs, msg.clone(), dfg); + if constraints.is_empty() { + Remove } else { - match (&dfg[dfg.resolve(*lhs)], &dfg[dfg.resolve(*rhs)]) { - ( - Value::NumericConstant { constant, typ }, - Value::Instruction { instruction, .. }, - ) - | ( - Value::Instruction { instruction, .. 
}, - Value::NumericConstant { constant, typ }, - ) if *typ == Type::bool() => { - match dfg[*instruction] { - Instruction::Binary(Binary { - lhs, - rhs, - operator: BinaryOp::Eq, - }) if constant.is_one() => { - // Replace an explicit two step equality assertion - // - // v2 = eq v0, u32 v1 - // constrain v2 == u1 1 - // - // with a direct assertion of equality between the two values - // - // v2 = eq v0, u32 v1 - // constrain v0 == v1 - // - // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it - // will likely be removed through dead instruction elimination. - - SimplifiedToInstruction(Instruction::Constrain( - lhs, - rhs, - msg.clone(), - )) - } - Instruction::Not(value) => { - // Replace an assertion that a not instruction is truthy - // - // v1 = not v0 - // constrain v1 == u1 1 - // - // with an assertion that the not instruction input is falsy - // - // v1 = not v0 - // constrain v0 == u1 0 - // - // Note that this doesn't remove the value `v1` as it may be used in other instructions, but it - // will likely be removed through dead instruction elimination. - let reversed_constant = FieldElement::from(!constant.is_one()); - let reversed_constant = - dfg.make_constant(reversed_constant, Type::bool()); - SimplifiedToInstruction(Instruction::Constrain( - value, - reversed_constant, - msg.clone(), - )) - } - - _ => None, - } - } - - _ => None, - } + SimplifiedToInstructionMultiple(constraints) } } Instruction::ArrayGet { array, index } => { @@ -533,16 +471,43 @@ impl Instruction { let truncated = numeric_constant.to_u128() % integer_modulus; SimplifiedTo(dfg.make_constant(truncated.into(), typ)) } else if let Value::Instruction { instruction, .. } = &dfg[dfg.resolve(*value)] { - if let Instruction::Truncate { bit_size: src_bit_size, .. } = &dfg[*instruction] - { - // If we're truncating the value to fit into the same or larger bit size then this is a noop. - if src_bit_size <= bit_size && src_bit_size <= max_bit_size { - SimplifiedTo(*value) - } else { - None + match &dfg[*instruction] { + Instruction::Truncate { bit_size: src_bit_size, .. } => { + // If we're truncating the value to fit into the same or larger bit size then this is a noop. + if src_bit_size <= bit_size && src_bit_size <= max_bit_size { + SimplifiedTo(*value) + } else { + None + } } - } else { - None + + Instruction::Binary(Binary { + lhs, rhs, operator: BinaryOp::Div, .. + }) if dfg.is_constant(*rhs) => { + // If we're truncating the result of a division by a constant denominator, we can + // reason about the maximum bit size of the result and whether a truncation is necessary. + + let numerator_type = dfg.type_of_value(*lhs); + let max_numerator_bits = numerator_type.bit_size(); + + let divisor = dfg + .get_numeric_constant(*rhs) + .expect("rhs is checked to be constant."); + let divisor_bits = divisor.num_bits(); + + // 2^{max_quotient_bits} = 2^{max_numerator_bits} / 2^{divisor_bits} + // => max_quotient_bits = max_numerator_bits - divisor_bits + // + // In order for the truncation to be a noop, we then require `max_quotient_bits < bit_size`. + let max_quotient_bits = max_numerator_bits - divisor_bits; + if max_quotient_bits < *bit_size { + SimplifiedTo(*value) + } else { + None + } + } + + _ => None, } } else { None @@ -619,6 +584,129 @@ fn simplify_cast(value: ValueId, dst_typ: &Type, dfg: &mut DataFlowGraph) -> Sim } } +/// Try to decompose this constrain instruction. 
This constraint will be broken down such that it instead constrains +/// all the values which are used to compute the values which were being constrained. +fn decompose_constrain( + lhs: ValueId, + rhs: ValueId, + msg: Option, + dfg: &mut DataFlowGraph, +) -> Vec { + let lhs = dfg.resolve(lhs); + let rhs = dfg.resolve(rhs); + + if lhs == rhs { + // Remove trivial case `assert_eq(x, x)` + Vec::new() + } else { + match (&dfg[lhs], &dfg[rhs]) { + (Value::NumericConstant { constant, typ }, Value::Instruction { instruction, .. }) + | (Value::Instruction { instruction, .. }, Value::NumericConstant { constant, typ }) + if *typ == Type::bool() => + { + match dfg[*instruction] { + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Eq }) + if constant.is_one() => + { + // Replace an explicit two step equality assertion + // + // v2 = eq v0, u32 v1 + // constrain v2 == u1 1 + // + // with a direct assertion of equality between the two values + // + // v2 = eq v0, u32 v1 + // constrain v0 == v1 + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + + vec![Instruction::Constrain(lhs, rhs, msg)] + } + + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Mul }) + if constant.is_one() && dfg.type_of_value(lhs) == Type::bool() => + { + // Replace an equality assertion on a boolean multiplication + // + // v2 = mul v0, v1 + // constrain v2 == u1 1 + // + // with a direct assertion that each value is equal to 1 + // + // v2 = mul v0, v1 + // constrain v0 == 1 + // constrain v1 == 1 + // + // This is due to the fact that for `v2` to be 1 then both `v0` and `v1` are 1. + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + let one = FieldElement::one(); + let one = dfg.make_constant(one, Type::bool()); + + [ + decompose_constrain(lhs, one, msg.clone(), dfg), + decompose_constrain(rhs, one, msg, dfg), + ] + .concat() + } + + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Or }) + if constant.is_zero() => + { + // Replace an equality assertion on an OR + // + // v2 = or v0, v1 + // constrain v2 == u1 0 + // + // with a direct assertion that each value is equal to 0 + // + // v2 = or v0, v1 + // constrain v0 == 0 + // constrain v1 == 0 + // + // This is due to the fact that for `v2` to be 0 then both `v0` and `v1` are 0. + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + let zero = FieldElement::zero(); + let zero = dfg.make_constant(zero, dfg.type_of_value(lhs)); + + [ + decompose_constrain(lhs, zero, msg.clone(), dfg), + decompose_constrain(rhs, zero, msg, dfg), + ] + .concat() + } + + Instruction::Not(value) => { + // Replace an assertion that a not instruction is truthy + // + // v1 = not v0 + // constrain v1 == u1 1 + // + // with an assertion that the not instruction input is falsy + // + // v1 = not v0 + // constrain v0 == u1 0 + // + // Note that this doesn't remove the value `v1` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. 
+ let reversed_constant = FieldElement::from(!constant.is_one()); + let reversed_constant = dfg.make_constant(reversed_constant, Type::bool()); + decompose_constrain(value, reversed_constant, msg, dfg) + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg)], + } + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg)], + } + } +} + /// The possible return values for Instruction::return_types pub(crate) enum InstructionResultType { /// The result type of this instruction matches that of this operand @@ -1107,6 +1195,10 @@ pub(crate) enum SimplifyResult { /// Replace this function with an simpler but equivalent instruction. SimplifiedToInstruction(Instruction), + /// Replace this function with a set of simpler but equivalent instructions. + /// This is currently only to be used for [`Instruction::Constrain`]. + SimplifiedToInstructionMultiple(Vec), + /// Remove the instruction, it is unnecessary Remove, @@ -1115,9 +1207,10 @@ pub(crate) enum SimplifyResult { } impl SimplifyResult { - pub(crate) fn instruction(self) -> Option { + pub(crate) fn instructions(self) -> Option> { match self { - SimplifyResult::SimplifiedToInstruction(instruction) => Some(instruction), + SimplifyResult::SimplifiedToInstruction(instruction) => Some(vec![instruction]), + SimplifyResult::SimplifiedToInstructionMultiple(instructions) => Some(instructions), _ => None, } } diff --git a/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index 7d345a9a4ab..62e91dfa9c8 100644 --- a/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/noir/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -184,7 +184,7 @@ mod test { function_builder::FunctionBuilder, ir::{ function::RuntimeType, - instruction::{BinaryOp, Instruction, TerminatorInstruction}, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, map::Id, types::Type, value::Value, @@ -247,6 +247,117 @@ mod test { } } + #[test] + fn redundant_truncation() { + // fn main f0 { + // b0(v0: u16, v1: u16): + // v2 = div v0, v1 + // v3 = truncate v2 to 8 bits, max_bit_size: 16 + // return v3 + // } + // + // After constructing this IR, we set the value of v1 to 2^8. + // The expected return afterwards should be v2. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::unsigned(16)); + let v1 = builder.add_parameter(Type::unsigned(16)); + + // Note that this constant guarantees that `v0/constant < 2^8`. We then do not need to truncate the result. 
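+ // Worked numbers for the bound used by this simplification: a u16 numerator has at most
+ // 16 bits, and 2^8 needs 9 bits, so the estimated quotient width is 16 - 9 = 7 < 8 and the
+ // 8-bit truncation can be dropped. A divisor of 2^8 - 1 needs only 8 bits, giving 16 - 8 = 8,
+ // which is not < 8, so the truncation must stay (see `non_redundant_truncation` below).
+ //
+ // A minimal standalone sketch of the same check (the helper name is illustrative, not part
+ // of the compiler):
+ //
+ //     fn div_truncation_is_noop(max_numerator_bits: u32, divisor: u128, bit_size: u32) -> bool {
+ //         let divisor_bits = 128 - divisor.leading_zeros();
+ //         max_numerator_bits - divisor_bits < bit_size
+ //     }
+ //     assert!(div_truncation_is_noop(16, 1 << 8, 8));         // 16 - 9 = 7 < 8
+ //     assert!(!div_truncation_is_noop(16, (1 << 8) - 1, 8));  // 16 - 8 = 8, not < 8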
+ let constant = 2_u128.pow(8); + let constant = builder.numeric_constant(constant, Type::field()); + + let v2 = builder.insert_binary(v0, BinaryOp::Div, v1); + let v3 = builder.insert_truncate(v2, 8, 16); + builder.terminate_with_return(vec![v3]); + + let mut ssa = builder.finish(); + let main = ssa.main_mut(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); // The final return is not counted + + // Expected output: + // + // fn main f0 { + // b0(Field 2: Field): + // return Field 9 + // } + main.dfg.set_value_from_id(v1, constant); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + + println!("{ssa}"); + + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 1); + let instruction = &main.dfg[instructions[0]]; + + assert_eq!( + instruction, + &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + ); + } + + #[test] + fn non_redundant_truncation() { + // fn main f0 { + // b0(v0: u16, v1: u16): + // v2 = div v0, v1 + // v3 = truncate v2 to 8 bits, max_bit_size: 16 + // return v3 + // } + // + // After constructing this IR, we set the value of v1 to 2^8 - 1. + // This should not result in the truncation being removed. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::unsigned(16)); + let v1 = builder.add_parameter(Type::unsigned(16)); + + // Note that this constant does not guarantee that `v0/constant < 2^8`. We must then truncate the result. + let constant = 2_u128.pow(8) - 1; + let constant = builder.numeric_constant(constant, Type::field()); + + let v2 = builder.insert_binary(v0, BinaryOp::Div, v1); + let v3 = builder.insert_truncate(v2, 8, 16); + builder.terminate_with_return(vec![v3]); + + let mut ssa = builder.finish(); + let main = ssa.main_mut(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); // The final return is not counted + + // Expected output: + // + // fn main f0 { + // b0(v0: u16, Field 255: Field): + // v5 = div v0, Field 255 + // v6 = truncate v5 to 8 bits, max_bit_size: 16 + // return v6 + // } + main.dfg.set_value_from_id(v1, constant); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); + + assert_eq!( + &main.dfg[instructions[0]], + &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + ); + assert_eq!( + &main.dfg[instructions[1]], + &Instruction::Truncate { value: ValueId::test_new(5), bit_size: 8, max_bit_size: 16 } + ); + } + #[test] fn arrays_elements_are_updated() { // fn main f0 { @@ -334,4 +445,67 @@ mod test { assert_eq!(instruction, &Instruction::Cast(v0, Type::unsigned(32))); } + + #[test] + fn constraint_decomposition() { + // fn main f0 { + // b0(v0: u1, v1: u1, v2: u1): + // v3 = mul v0 v1 + // v4 = not v2 + // v5 = mul v3 v4 + // constrain v4 u1 1 + // } + // + // When constructing this IR, we should automatically decompose the constraint to be in terms of `v0`, `v1` and `v2`. + // + // The mul instructions are retained and will be removed in the dead instruction elimination pass. 
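+ //
+ // Decomposition trace (informal): `constrain v5 == 1` with `v5 = mul v3, v4` splits into
+ // `constrain v3 == 1` and `constrain v4 == 1`; `v3 = mul v0, v1` splits again into
+ // `constrain v0 == 1` and `constrain v1 == 1`; and `constrain v4 == 1` with `v4 = not v2`
+ // becomes `constrain v2 == 0`.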
+ let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::bool()); + let v1 = builder.add_parameter(Type::bool()); + let v2 = builder.add_parameter(Type::bool()); + + let v3 = builder.insert_binary(v0, BinaryOp::Mul, v1); + let v4 = builder.insert_not(v2); + let v5 = builder.insert_binary(v3, BinaryOp::Mul, v4); + + // This constraint is automatically decomposed when it is inserted. + let v_true = builder.numeric_constant(true, Type::bool()); + builder.insert_constrain(v5, v_true, None); + + let v_false = builder.numeric_constant(false, Type::bool()); + + // Expected output: + // + // fn main f0 { + // b0(v0: u1, v1: u1, v2: u1): + // v3 = mul v0 v1 + // v4 = not v2 + // v5 = mul v3 v4 + // constrain v0 u1 1 + // constrain v1 u1 1 + // constrain v2 u1 0 + // } + + let ssa = builder.finish(); + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + + assert_eq!(instructions.len(), 6); + + assert_eq!( + main.dfg[instructions[0]], + Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Mul, rhs: v1 }) + ); + assert_eq!(main.dfg[instructions[1]], Instruction::Not(v2)); + assert_eq!( + main.dfg[instructions[2]], + Instruction::Binary(Binary { lhs: v3, operator: BinaryOp::Mul, rhs: v4 }) + ); + assert_eq!(main.dfg[instructions[3]], Instruction::Constrain(v0, v_true, None)); + assert_eq!(main.dfg[instructions[4]], Instruction::Constrain(v1, v_true, None)); + assert_eq!(main.dfg[instructions[5]], Instruction::Constrain(v2, v_false, None)); + } } diff --git a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs index 9c46ef35854..fbf94468c4b 100644 --- a/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/noir/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -64,6 +64,7 @@ pub struct CrateDefMap { pub(crate) krate: CrateId, + /// Maps an external dependency's name to its root module id. pub(crate) extern_prelude: BTreeMap, } diff --git a/noir/cspell.json b/noir/cspell.json index 8e3f248acfa..a44cdc88e56 100644 --- a/noir/cspell.json +++ b/noir/cspell.json @@ -86,6 +86,7 @@ "jsdoc", "Jubjub", "keccak", + "keccakf", "krate", "lvalue", "mathbb", diff --git a/noir/docs/src/pages/index.jsx b/noir/docs/src/pages/index.jsx index 8485a730785..d5cbfcba977 100644 --- a/noir/docs/src/pages/index.jsx +++ b/noir/docs/src/pages/index.jsx @@ -38,7 +38,7 @@ export default function Landing() {

Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR - compatible proving system. It's design choices are influenced heavily by Rust and focuses on a simple, + compatible proving system. Its design choices are influenced heavily by Rust and focus on a simple, familiar syntax.

diff --git a/noir/noir_stdlib/src/lib.nr b/noir/noir_stdlib/src/lib.nr index ab984cafc88..23a7c71ff45 100644 --- a/noir/noir_stdlib/src/lib.nr +++ b/noir/noir_stdlib/src/lib.nr @@ -23,6 +23,7 @@ mod cmp; mod ops; mod default; mod prelude; +mod uint128; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident diff --git a/noir/noir_stdlib/src/prelude.nr b/noir/noir_stdlib/src/prelude.nr index 56020509122..b57ff460371 100644 --- a/noir/noir_stdlib/src/prelude.nr +++ b/noir/noir_stdlib/src/prelude.nr @@ -1,5 +1,6 @@ use crate::collections::vec::Vec; use crate::option::Option; use crate::{print, println, assert_constant}; +use crate::uint128::U128; use crate::cmp::{Eq, Ord}; use crate::default::Default; diff --git a/noir/noir_stdlib/src/uint128.nr b/noir/noir_stdlib/src/uint128.nr new file mode 100644 index 00000000000..4a58b3868be --- /dev/null +++ b/noir/noir_stdlib/src/uint128.nr @@ -0,0 +1,292 @@ +use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::cmp::{Eq, Ord, Ordering}; + +global pow64 : Field = 18446744073709551616; //2^64; + +struct U128 { + lo: Field, + hi: Field, +} + +impl U128 { + + pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { + // in order to handle multiplication, we need to represent the product of two u64 without overflow + assert(crate::field::modulus_num_bits() as u32 > 128); + U128 { + lo: lo as Field, + hi: hi as Field, + } + } + + pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { + U128::from_u64s_le(lo,hi) + } + + pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { + let mut lo = 0; + let mut base = 1; + for i in 0..8 { + lo += (bytes[i] as Field)*base; + base *= 256; + } + let mut hi = 0; + base = 1; + for i in 8..16 { + hi += (bytes[i] as Field)*base; + base *= 256; + } + U128 { + lo, + hi, + } + } + + pub fn to_le_bytes(self: Self) -> [u8; 16] { + let lo = self.lo.to_le_bytes(8); + let hi = self.hi.to_le_bytes(8); + let mut bytes = [0;16]; + for i in 0..8 { + bytes[i] = lo[i]; + bytes[i+8] = hi[i]; + } + bytes + } + + pub fn from_hex(hex: str) -> U128 { + let N = N as u32; + let bytes = hex.as_bytes(); + // string must starts with "0x" + assert((bytes[0] == 48) & (bytes[1] == 120), "Invalid hexadecimal string"); + assert(N < 35, "Input does not fit into a U128"); + + let mut lo = 0; + let mut hi = 0; + let mut base = 1; + if N <= 18 { + for i in 0..N-2 { + lo += U128::decode_ascii(bytes[N-i-1])*base; + base = base*16; + } + } else { + for i in 0..16 { + lo += U128::decode_ascii(bytes[N-i-1])*base; + base = base*16; + } + base = 1; + for i in 17..N-1 { + hi += U128::decode_ascii(bytes[N-i])*base; + base = base*16; + } + } + U128 { + lo: lo as Field, + hi: hi as Field, + } + } + + fn decode_ascii(ascii: u8) -> Field { + if ascii < 58 { + ascii - 48 + } else { + if ascii < 71 { + ascii - 55 + } else { + ascii - 87 + } + + } as Field + } + + unconstrained fn unconstrained_div(self: Self, b: U128) -> (U128, U128) { + if self < b { + (U128::from_u64s_le(0, 0), self) + } else { + //TODO check if this can overflow? 
+ let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2,0)); + let q_mul_2 = q * U128::from_u64s_le(2,0); + if r < b { + (q_mul_2, r) + } else { + (q_mul_2 + U128::from_u64s_le(1,0), r - b) + } + + } + } + + pub fn from_integer(i: T) -> U128 { + let f = crate::as_field(i); + let lo = f as u64 as Field; + let hi = (f-lo) / pow64; + U128 { + lo, + hi, + } + } + + pub fn to_integer(self) -> T { + crate::from_field(self.lo+self.hi*pow64) + } + + fn wrapping_mul(self: Self, b: U128) -> U128 { + let low = self.lo*b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo+self.hi)*(b.lo+b.hi) - low + carry + } else { + self.lo*b.hi + self.hi*b.lo + carry + }; + let hi = high as u64 as Field; + U128 { + lo, + hi, + } + } +} + +impl Add for U128 { + pub fn add(self: Self, b: U128) -> U128 { + let low = self.lo + b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = self.hi + b.hi + carry; + let hi = high as u64 as Field; + assert(hi == high, "attempt to add with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Sub for U128 { + pub fn sub(self: Self, b: U128) -> U128 { + let low = pow64 + self.lo - b.lo; + let lo = low as u64 as Field; + let borrow = (low == lo) as Field; + let high = self.hi - b.hi - borrow; + let hi = high as u64 as Field; + assert(hi == high, "attempt to subtract with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Mul for U128 { + pub fn mul(self: Self, b: U128) -> U128 { + assert(self.hi*b.hi == 0, "attempt to multiply with overflow"); + let low = self.lo*b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo+self.hi)*(b.lo+b.hi) - low + carry + } else { + self.lo*b.hi + self.hi*b.lo + carry + }; + let hi = high as u64 as Field; + assert(hi == high, "attempt to multiply with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Div for U128 { + pub fn div(self: Self, b: U128) -> U128 { + let (q,r) = self.unconstrained_div(b); + let a = b * q + r; + assert_eq(self, a); + assert(r < b); + q + } +} + +impl Rem for U128 { + pub fn rem(self: Self, b: U128) -> U128 { + let (q,r) = self.unconstrained_div(b); + let a = b * q + r; + assert_eq(self, a); + assert(r < b); + r + } +} + +impl Eq for U128 { + pub fn eq(self: Self, b: U128) -> bool { + (self.lo == b.lo) & (self.hi == b.hi) + } +} + +impl Ord for U128 { + fn cmp(self, other: Self) -> Ordering { + let hi_ordering = (self.hi as u64).cmp((other.hi as u64)); + let lo_ordering = (self.lo as u64).cmp((other.lo as u64)); + + if hi_ordering == Ordering::equal() { + lo_ordering + } else { + hi_ordering + } + } +} + +impl BitOr for U128 { + fn bitor(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) | (other.lo as u64)) as Field, + hi: ((self.hi as u64) | (other.hi as u64))as Field + } + } +} + +impl BitAnd for U128 { + fn bitand(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) & (other.lo as u64)) as Field, + hi: ((self.hi as u64) & (other.hi as u64)) as Field + } + } +} + +impl BitXor for U128 { + fn bitxor(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) ^ (other.lo as u64)) as Field, + hi: ((self.hi as u64) ^ (other.hi as u64)) as Field + } + } +} + +impl Shl for U128 { + fn shl(self, other: U128) -> U128 { + assert(other < U128::from_u64s_le(128,0), "attempt to shift left with overflow"); + let exp_bits = other.lo.to_be_bits(7); + + let mut r: Field = 2; + let mut y: Field = 1; 
+ for i in 1..8 { + y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + r *= r; + } + self.wrapping_mul(U128::from_integer(y)) + } +} + +impl Shr for U128 { + fn shr(self, other: U128) -> U128 { + assert(other < U128::from_u64s_le(128,0), "attempt to shift right with overflow"); + let exp_bits = other.lo.to_be_bits(7); + + let mut r: Field = 2; + let mut y: Field = 1; + for i in 1..8 { + y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + r *= r; + } + self / U128::from_integer(y) + } +} \ No newline at end of file diff --git a/noir/scripts/nargo_compile_noir_codegen_assert_lt.sh b/noir/scripts/nargo_compile_noir_codegen_assert_lt.sh deleted file mode 100755 index 858a16cf517..00000000000 --- a/noir/scripts/nargo_compile_noir_codegen_assert_lt.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -cd ./tooling/noir_codegen/test/assert_lt -nargo compile \ No newline at end of file diff --git a/noir/test_programs/compile_success_empty/reexports/Nargo.toml b/noir/test_programs/compile_success_empty/reexports/Nargo.toml new file mode 100644 index 00000000000..4a87f28fd89 --- /dev/null +++ b/noir/test_programs/compile_success_empty/reexports/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "reexports" +type = "bin" +authors = [""] + +[dependencies] +reexporting_lib = { path = "../../test_libraries/reexporting_lib" } diff --git a/noir/test_programs/compile_success_empty/reexports/src/main.nr b/noir/test_programs/compile_success_empty/reexports/src/main.nr new file mode 100644 index 00000000000..bb94b21b221 --- /dev/null +++ b/noir/test_programs/compile_success_empty/reexports/src/main.nr @@ -0,0 +1,8 @@ +use dep::reexporting_lib::{FooStruct, MyStruct, lib}; + +fn main() { + let x: FooStruct = MyStruct { + inner: 0 + }; + assert(lib::is_struct_zero(x)); +} diff --git a/noir/test_programs/execution_success/u128/Nargo.toml b/noir/test_programs/execution_success/u128/Nargo.toml new file mode 100644 index 00000000000..c1dcd84db04 --- /dev/null +++ b/noir/test_programs/execution_success/u128/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "u128" +type = "bin" +authors = [""] + +[dependencies] diff --git a/noir/test_programs/execution_success/u128/Prover.toml b/noir/test_programs/execution_success/u128/Prover.toml new file mode 100644 index 00000000000..961db9825a7 --- /dev/null +++ b/noir/test_programs/execution_success/u128/Prover.toml @@ -0,0 +1,7 @@ +x = "3" +y = "4" +z = "7" +hexa ="0x1f03a" +[big_int] +lo = 1 +hi = 2 \ No newline at end of file diff --git a/noir/test_programs/execution_success/u128/src/main.nr b/noir/test_programs/execution_success/u128/src/main.nr new file mode 100644 index 00000000000..4c734f3a8f9 --- /dev/null +++ b/noir/test_programs/execution_success/u128/src/main.nr @@ -0,0 +1,44 @@ +use dep::std; + +fn main(mut x: u32, y: u32, z: u32, big_int: U128, hexa: str<7>) { + let a = U128::from_u64s_le(x as u64, x as u64); + let b = U128::from_u64s_le(y as u64, x as u64); + let c = a + b; + assert(c.lo == z as Field); + assert(c.hi == 2 * x as Field); + assert(U128::from_hex(hexa).lo == 0x1f03a); + let t1 = U128::from_hex("0x9d9c7a87771f03a23783f9d9c7a8777"); + let t2 = U128::from_hex("0x45a26c708BFCF39041"); + let t = t1 + t2; + assert(t.lo == 0xc5e4b029996e17b8); + assert(t.hi == 0x09d9c7a87771f07f); + let t3 = U128::from_le_bytes(t.to_le_bytes()); + assert(t == t3); + + let t4 = t - t2; + assert(t4 == t1); + + let t5 = U128::from_u64s_le(0, 1); + let t6 = U128::from_u64s_le(1, 0); + assert((t5 - t6).hi == 0); + + assert( + 
(U128::from_hex("0x71f03a23783f9d9c7a8777") * U128::from_hex("0x8BFCF39041")).hi + == U128::from_hex("0x3e4e0471b873470e247c824e61445537").hi + ); + let q = U128::from_hex("0x3e4e0471b873470e247c824e61445537") / U128::from_hex("0x8BFCF39041"); + assert(q == U128::from_hex("0x71f03a23783f9d9c7a8777")); + + assert(big_int.hi == 2); + + let mut small_int = U128::from_integer(x); + assert(small_int.lo == x as Field); + assert(x == small_int.to_integer()); + let shift = small_int << small_int; + assert(shift == U128::from_integer(x << x)); + assert(shift >> small_int == small_int); + assert(shift >> U128::from_integer(127) == U128::from_integer(0)); + assert(shift << U128::from_integer(127) == U128::from_integer(0)); + +} + diff --git a/noir/test_programs/test_libraries/exporting_lib/Nargo.toml b/noir/test_programs/test_libraries/exporting_lib/Nargo.toml new file mode 100644 index 00000000000..628418c0608 --- /dev/null +++ b/noir/test_programs/test_libraries/exporting_lib/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "exporting_lib" +type = "lib" +authors = [""] + +[dependencies] diff --git a/noir/test_programs/test_libraries/exporting_lib/src/lib.nr b/noir/test_programs/test_libraries/exporting_lib/src/lib.nr new file mode 100644 index 00000000000..af1fd7a32de --- /dev/null +++ b/noir/test_programs/test_libraries/exporting_lib/src/lib.nr @@ -0,0 +1,10 @@ + +struct MyStruct { + inner: Field +} + +type FooStruct = MyStruct; + +fn is_struct_zero(val: MyStruct) -> bool { + val.inner == 0 +} diff --git a/noir/test_programs/test_libraries/reexporting_lib/Nargo.toml b/noir/test_programs/test_libraries/reexporting_lib/Nargo.toml new file mode 100644 index 00000000000..c26ce501e56 --- /dev/null +++ b/noir/test_programs/test_libraries/reexporting_lib/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "reexporting_lib" +type = "lib" +authors = [""] + +[dependencies] +exporting_lib = { path = "../exporting_lib" } diff --git a/noir/test_programs/test_libraries/reexporting_lib/src/lib.nr b/noir/test_programs/test_libraries/reexporting_lib/src/lib.nr new file mode 100644 index 00000000000..f12dfe01ecd --- /dev/null +++ b/noir/test_programs/test_libraries/reexporting_lib/src/lib.nr @@ -0,0 +1,3 @@ +use dep::exporting_lib::{MyStruct, FooStruct}; + +use dep::exporting_lib as lib; diff --git a/noir/tooling/noir_codegen/test/test_lib/Nargo.toml b/noir/tooling/noir_codegen/test/test_lib/Nargo.toml index 74b6167b614..a59af9e6fc3 100644 --- a/noir/tooling/noir_codegen/test/test_lib/Nargo.toml +++ b/noir/tooling/noir_codegen/test/test_lib/Nargo.toml @@ -2,4 +2,5 @@ name = "test_lib" type = "lib" authors = [""] + [dependencies] diff --git a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr index ebaca1fd70c..f900430cb35 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/private-kernel-lib/src/private_kernel_ordering.nr @@ -382,7 +382,7 @@ mod tests { assert(array_length(public_inputs.end.new_nullifiers) == 3); } - #[test(should_fail_with="New nullifier is transient but hint is invalid")] + #[test(should_fail)] fn invalid_nullifier_commitment_hint_fails() { let mut builder = PrivateKernelOrderingInputsBuilder::new(); diff --git a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr 
b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr index e38c5f5ae67..8f833310eba 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/public-kernel-lib/src/common.nr @@ -30,7 +30,7 @@ use dep::types::{ utils::{ arrays::{array_length, array_to_bounded_vec}, bounded_vec::BoundedVec, - uint128::U128, + uint128::AztecU128, }, traits::is_empty_array }; @@ -280,10 +280,10 @@ pub fn accumulate_unencrypted_logs( let current_unencrypted_logs_hash = public_call_public_inputs.unencrypted_logs_hash; public_inputs.end.unencrypted_logs_hash = accumulate_sha256([ - U128::from_field(previous_unencrypted_logs_hash[0]), - U128::from_field(previous_unencrypted_logs_hash[1]), - U128::from_field(current_unencrypted_logs_hash[0]), - U128::from_field(current_unencrypted_logs_hash[1]) + AztecU128::from_field(previous_unencrypted_logs_hash[0]), + AztecU128::from_field(previous_unencrypted_logs_hash[1]), + AztecU128::from_field(current_unencrypted_logs_hash[0]), + AztecU128::from_field(current_unencrypted_logs_hash[1]) ]); // Add log preimages lengths from current iteration to accumulated lengths diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr index 3cb598849cf..10eb323d07f 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/abis/base_or_merge_rollup_public_inputs.nr @@ -24,8 +24,8 @@ struct BaseOrMergeRollupPublicInputs { end: PartialStateReference, // We hash public inputs to make them constant-sized (to then be unpacked on-chain) - // U128 isn't safe if it's an input to the circuit (it won't automatically constrain the witness) - // So we want to constrain it when casting these fields to U128 + // AztecU128 isn't safe if it's an input to the circuit (it won't automatically constrain the witness) + // So we want to constrain it when casting these fields to AztecU128 // TODO(#3938): split this to txs_hash and out_hash // We hash public inputs to make them constant-sized (to then be unpacked on-chain) diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr index 4aafd945173..272e0f292b6 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/components.nr @@ -1,7 +1,7 @@ use crate::abis::base_or_merge_rollup_public_inputs::BaseOrMergeRollupPublicInputs; use dep::types::mocked::AggregationObject; use dep::types::hash::{accumulate_sha256, assert_check_membership, root_from_sibling_path}; -use dep::types::utils::uint128::U128; +use dep::types::utils::uint128::AztecU128; use dep::types::constants::NUM_FIELDS_PER_SHA256; use crate::abis::previous_rollup_data::PreviousRollupData; use dep::types::abis::append_only_tree_snapshot::AppendOnlyTreeSnapshot; @@ -86,10 +86,10 @@ pub fn assert_prev_rollups_follow_on_from_each_other( pub fn compute_calldata_hash(previous_rollup_data: [PreviousRollupData; 2]) -> [Field; NUM_FIELDS_PER_SHA256] { accumulate_sha256( [ - U128::from_field(previous_rollup_data[0].base_or_merge_rollup_public_inputs.calldata_hash[0]), - 
U128::from_field(previous_rollup_data[0].base_or_merge_rollup_public_inputs.calldata_hash[1]), - U128::from_field(previous_rollup_data[1].base_or_merge_rollup_public_inputs.calldata_hash[0]), - U128::from_field(previous_rollup_data[1].base_or_merge_rollup_public_inputs.calldata_hash[1]) + AztecU128::from_field(previous_rollup_data[0].base_or_merge_rollup_public_inputs.calldata_hash[0]), + AztecU128::from_field(previous_rollup_data[0].base_or_merge_rollup_public_inputs.calldata_hash[1]), + AztecU128::from_field(previous_rollup_data[1].base_or_merge_rollup_public_inputs.calldata_hash[0]), + AztecU128::from_field(previous_rollup_data[1].base_or_merge_rollup_public_inputs.calldata_hash[1]) ] ) } diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/merge/merge_rollup_inputs.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/merge/merge_rollup_inputs.nr index d844322982d..bfd3690e5f2 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/merge/merge_rollup_inputs.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/merge/merge_rollup_inputs.nr @@ -48,7 +48,7 @@ mod tests { tests::merge_rollup_inputs::default_merge_rollup_inputs, }; use dep::types::hash::accumulate_sha256; - use dep::types::utils::uint128::U128; + use dep::types::utils::uint128::AztecU128; #[test(should_fail_with="input proofs are of different rollup types")] fn different_rollup_type_fails() { @@ -141,10 +141,10 @@ mod tests { let mut inputs = default_merge_rollup_inputs(); let expected_calldata_hash = accumulate_sha256( [ - U128::from_field(0), - U128::from_field(1), - U128::from_field(2), - U128::from_field(3) + AztecU128::from_field(0), + AztecU128::from_field(1), + AztecU128::from_field(2), + AztecU128::from_field(3) ] ); let outputs = inputs.merge_rollup_circuit(); diff --git a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr index 8192e44ab76..0fff41cc8fe 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/rollup-lib/src/root.nr @@ -141,7 +141,7 @@ mod tests { }, tests::root_rollup_inputs::default_root_rollup_inputs, }; - use dep::types::utils::uint128::U128; + use dep::types::utils::uint128::AztecU128; use dep::types::utils::uint256::U256; use dep::types::hash::accumulate_sha256; @@ -151,10 +151,10 @@ mod tests { let expected_calldata_hash = accumulate_sha256( [ - U128::from_field(0), - U128::from_field(1), - U128::from_field(2), - U128::from_field(3) + AztecU128::from_field(0), + AztecU128::from_field(1), + AztecU128::from_field(2), + AztecU128::from_field(3) ] ); diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr index 8a5f562166f..0fd14eca1ff 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/hash.nr @@ -5,7 +5,7 @@ use crate::abis::function_leaf_preimage::FunctionLeafPreimage; use crate::abis::new_contract_data::NewContractData as ContractLeafPreimage; use crate::abis::function_data::FunctionData; use crate::abis::side_effect::{SideEffect}; -use crate::utils::uint128::U128; +use crate::utils::uint128::AztecU128; use crate::utils::uint256::U256; use crate::utils::bounded_vec::BoundedVec; use crate::constants::{ @@ -243,9 +243,9 @@ pub fn compute_constructor_hash( // Returning a Field would 
be desirable because then this can be replaced with // poseidon without changing the rest of the code // -pub fn accumulate_sha256(input: [U128; 4]) -> [Field; NUM_FIELDS_PER_SHA256] { +pub fn accumulate_sha256(input: [AztecU128; 4]) -> [Field; NUM_FIELDS_PER_SHA256] { // This is a note about the cpp code, since it takes an array of Fields - // instead of a U128. + // instead of a AztecU128. // 4 Field elements when converted to bytes will usually // occupy 4 * 32 = 128 bytes. // However, this function is making the assumption that each Field @@ -273,10 +273,10 @@ pub fn compute_logs_hash( ) -> [Field; NUM_FIELDS_PER_SHA256] { accumulate_sha256( [ - U128::from_field(previous_log_hash[0]), - U128::from_field(previous_log_hash[1]), - U128::from_field(current_log_hash[0]), - U128::from_field(current_log_hash[1]) + AztecU128::from_field(previous_log_hash[0]), + AztecU128::from_field(previous_log_hash[1]), + AztecU128::from_field(current_log_hash[0]), + AztecU128::from_field(current_log_hash[1]) ] ) } diff --git a/yarn-project/noir-protocol-circuits/src/crates/types/src/utils/uint128.nr b/yarn-project/noir-protocol-circuits/src/crates/types/src/utils/uint128.nr index fcaad135ec4..f65935ae8ff 100644 --- a/yarn-project/noir-protocol-circuits/src/crates/types/src/utils/uint128.nr +++ b/yarn-project/noir-protocol-circuits/src/crates/types/src/utils/uint128.nr @@ -1,23 +1,23 @@ // This is a diversion from the cpp code. // The cpp code uses Fields for log_hashes // whereas we are using u128 to make sure that it is really a u128. -struct U128 { +struct AztecU128 { inner : Field } -impl U128 { +impl AztecU128 { pub fn to_field(self) -> Field { self.inner as Field } - pub fn from_field(value : Field) -> U128 { + pub fn from_field(value : Field) -> AztecU128 { // TODO(Kev): Apply 128 bit range constraint and fail if this is not the case. // We can expose a `apply_range_constraint` method from Noir which can take a field // and return a Field. // It won't be type-safe, but thats fine. We may then be able to implement // u128 in the stdlib and have it be called automatically when a user // does `let x :u128 = 0;` We will require traits to make operations nice. - U128{inner : value} + AztecU128{inner : value} } pub fn to_be_bytes(self) -> [u8;16] {