diff --git a/.dockerignore b/.dockerignore index 8bea0aeeb1a..559b271bf38 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,6 +1,15 @@ Dockerfile* .dockerignore +# Yarn +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/sdks +!.yarn/versions + packages **/package.tgz **/target diff --git a/.github/DEAD_LINKS_IN_DOCS.md b/.github/DEAD_LINKS_IN_DOCS.md new file mode 100644 index 00000000000..b1671276dcf --- /dev/null +++ b/.github/DEAD_LINKS_IN_DOCS.md @@ -0,0 +1,11 @@ +--- +title: "Docs contains dead links" +assignees: signorecello catmcgee critesjosh jzaki Savio-Sou +labels: documentation +--- + +Some of the external links in the docs are now dead. This is likely due to the thing being linked to being moved. + +Check the [Check Markdown links]({{env.WORKFLOW_URL}}) workflow for details. + +This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/NIGHTLY_TEST_FAILURE.md b/.github/NIGHTLY_TEST_FAILURE.md deleted file mode 100644 index 05772d82a51..00000000000 --- a/.github/NIGHTLY_TEST_FAILURE.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "nightly test-integration failed" -assignees: kobyhallx, tomafrench, jonybur -labels: bug ---- - -Something broke our nightly integration test. - -Check the [test]({{env.WORKFLOW_URL}}) workflow for details. 
- -This issue was raised by the workflow `{{env.WORKFLOW_NAME}}` diff --git a/.github/actions/docs/build-status/script.sh b/.github/actions/docs/build-status/script.sh index 0b282557cf2..2e86de6c173 100755 --- a/.github/actions/docs/build-status/script.sh +++ b/.github/actions/docs/build-status/script.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash BRANCH_NAME=$(echo "$BRANCH_NAME" | sed -e "s#refs/[^/]*/##") DEPLOY_STATUS=$(curl -X GET "https://api.netlify.com/api/v1/sites/$SITE_ID/deploys?branch=$BRANCH_NAME" | jq -r '.[] | select(.created_at != null) | .state' | head -1) diff --git a/.github/actions/setup/action.yml b/.github/actions/setup/action.yml index 8e24b6738a9..b265a63d29a 100644 --- a/.github/actions/setup/action.yml +++ b/.github/actions/setup/action.yml @@ -4,7 +4,7 @@ description: Installs the workspace's yarn dependencies and caches them runs: using: composite steps: - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 id: node with: node-version: 18.17.1 diff --git a/.github/scripts/acvm_js-build.sh b/.github/scripts/acvm_js-build.sh new file mode 100755 index 00000000000..95bd1efc8b9 --- /dev/null +++ b/.github/scripts/acvm_js-build.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +.github/scripts/wasm-bindgen-install.sh +yarn workspace @noir-lang/acvm_js build diff --git a/.github/scripts/acvm_js-test-browser.sh b/.github/scripts/acvm_js-test-browser.sh new file mode 100755 index 00000000000..598c98dadf2 --- /dev/null +++ b/.github/scripts/acvm_js-test-browser.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/acvm_js test:browser diff --git a/.github/scripts/acvm_js-test.sh b/.github/scripts/acvm_js-test.sh new file mode 100755 index 00000000000..d5519d26cc4 --- /dev/null +++ b/.github/scripts/acvm_js-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/acvm_js test diff --git a/.github/scripts/backend-barretenberg-build.sh 
b/.github/scripts/backend-barretenberg-build.sh new file mode 100755 index 00000000000..d90995397d8 --- /dev/null +++ b/.github/scripts/backend-barretenberg-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/backend_barretenberg build diff --git a/.github/scripts/backend-barretenberg-test.sh b/.github/scripts/backend-barretenberg-test.sh new file mode 100755 index 00000000000..1bd6f8e410d --- /dev/null +++ b/.github/scripts/backend-barretenberg-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/backend_barretenberg test diff --git a/.github/scripts/integration-test.sh b/.github/scripts/integration-test.sh new file mode 100755 index 00000000000..4e1b52cedf9 --- /dev/null +++ b/.github/scripts/integration-test.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -eu + +apt-get install libc++-dev -y +npx playwright install && npx playwright install-deps +yarn workspace integration-tests test \ No newline at end of file diff --git a/.github/scripts/nargo-build.sh b/.github/scripts/nargo-build.sh new file mode 100755 index 00000000000..2115732ab7e --- /dev/null +++ b/.github/scripts/nargo-build.sh @@ -0,0 +1,8 @@ +#!/bin/bash +set -eu + +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +export GIT_COMMIT=$(git rev-parse --verify HEAD) + +cargo build --release diff --git a/.github/scripts/nargo-test.sh b/.github/scripts/nargo-test.sh new file mode 100755 index 00000000000..9234df7bf5c --- /dev/null +++ b/.github/scripts/nargo-test.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -eu + +apt-get install -y curl libc++-dev + +export SOURCE_DATE_EPOCH=$(date +%s) +export GIT_DIRTY=false +export GIT_COMMIT=$(git rev-parse --verify HEAD) + +cargo test --workspace --locked --release \ No newline at end of file diff --git a/.github/scripts/noir-codegen-build.sh b/.github/scripts/noir-codegen-build.sh new file mode 100755 index 00000000000..d42be4d676e --- /dev/null +++ b/.github/scripts/noir-codegen-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu 
+ +yarn workspace @noir-lang/noir_codegen build diff --git a/.github/scripts/noir-codegen-test.sh b/.github/scripts/noir-codegen-test.sh new file mode 100755 index 00000000000..6f603f65507 --- /dev/null +++ b/.github/scripts/noir-codegen-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noir_codegen test \ No newline at end of file diff --git a/.github/scripts/noir-js-build.sh b/.github/scripts/noir-js-build.sh new file mode 100755 index 00000000000..04367e41342 --- /dev/null +++ b/.github/scripts/noir-js-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noir_js build diff --git a/.github/scripts/noir-js-test.sh b/.github/scripts/noir-js-test.sh new file mode 100755 index 00000000000..b5fe34038fe --- /dev/null +++ b/.github/scripts/noir-js-test.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -eu + +./scripts/nargo_compile_noir_js_assert_lt.sh +rm -rf /usr/src/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/debug_assert_lt.json +yarn workspace @noir-lang/noir_js test \ No newline at end of file diff --git a/.github/scripts/noir-js-types-build.sh b/.github/scripts/noir-js-types-build.sh new file mode 100755 index 00000000000..77b08651d68 --- /dev/null +++ b/.github/scripts/noir-js-types-build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/types build \ No newline at end of file diff --git a/.github/scripts/noir-wasm-build.sh b/.github/scripts/noir-wasm-build.sh new file mode 100755 index 00000000000..f799387b6f6 --- /dev/null +++ b/.github/scripts/noir-wasm-build.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +.github/scripts/wasm-pack-install.sh +yarn workspace @noir-lang/noir_wasm build diff --git a/.github/scripts/noir-wasm-test-browser.sh b/.github/scripts/noir-wasm-test-browser.sh new file mode 100755 index 00000000000..4b584abce23 --- /dev/null +++ b/.github/scripts/noir-wasm-test-browser.sh @@ -0,0 +1,6 @@ +#!/bin/bash +set -eu + +./scripts/nargo_compile_wasm_fixtures.sh +npx 
playwright install && npx playwright install-deps +yarn workspace @noir-lang/noir_wasm test:browser \ No newline at end of file diff --git a/.github/scripts/noir-wasm-test.sh b/.github/scripts/noir-wasm-test.sh new file mode 100755 index 00000000000..03e1bac2330 --- /dev/null +++ b/.github/scripts/noir-wasm-test.sh @@ -0,0 +1,7 @@ +#!/bin/bash +set -eu + +./scripts/nargo_compile_wasm_fixtures.sh +yarn workspace @noir-lang/noir_wasm test:node +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/noir_wasm test:browser diff --git a/.github/scripts/noirc-abi-build.sh b/.github/scripts/noirc-abi-build.sh new file mode 100755 index 00000000000..23b8393088e --- /dev/null +++ b/.github/scripts/noirc-abi-build.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +.github/scripts/wasm-bindgen-install.sh +yarn workspace @noir-lang/noirc_abi build diff --git a/.github/scripts/noirc-abi-test-browser.sh b/.github/scripts/noirc-abi-test-browser.sh new file mode 100755 index 00000000000..7a966cb5e94 --- /dev/null +++ b/.github/scripts/noirc-abi-test-browser.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +npx playwright install && npx playwright install-deps +yarn workspace @noir-lang/noirc_abi test:browser diff --git a/.github/scripts/noirc-abi-test.sh b/.github/scripts/noirc-abi-test.sh new file mode 100755 index 00000000000..39ca0a44b07 --- /dev/null +++ b/.github/scripts/noirc-abi-test.sh @@ -0,0 +1,4 @@ +#!/bin/bash +set -eu + +yarn workspace @noir-lang/noirc_abi test diff --git a/.github/scripts/wasm-bindgen-install.sh b/.github/scripts/wasm-bindgen-install.sh new file mode 100755 index 00000000000..b8c41393ab0 --- /dev/null +++ b/.github/scripts/wasm-bindgen-install.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash +cargo-binstall wasm-bindgen-cli --version 0.2.86 -y diff --git a/.github/scripts/wasm-pack-install.sh 
b/.github/scripts/wasm-pack-install.sh new file mode 100755 index 00000000000..f9b2fe160d5 --- /dev/null +++ b/.github/scripts/wasm-pack-install.sh @@ -0,0 +1,5 @@ +#!/bin/bash +set -eu + +curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash +cargo-binstall wasm-pack --version 0.12.1 -y diff --git a/.github/workflows/acir-artifacts.yml b/.github/workflows/acir-artifacts.yml deleted file mode 100644 index 43d9acfdedb..00000000000 --- a/.github/workflows/acir-artifacts.yml +++ /dev/null @@ -1,107 +0,0 @@ -name: Build ACIR artifacts - -on: - pull_request: - push: - branches: - - master - -jobs: - check-artifacts-requested: - name: Check if artifacts should be published - runs-on: ubuntu-22.04 - outputs: - publish: ${{ steps.check.outputs.result }} - - steps: - - name: Check if artifacts should be published - id: check - uses: actions/github-script@v6 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const { REF_NAME } = process.env; - if (REF_NAME == "master") { - console.log(`publish = true`) - return true; - } - - const labels = context.payload.pull_request.labels.map(label => label.name); - const publish = labels.includes('publish-acir'); - - console.log(`publish = ${publish}`) - return publish; - result-encoding: string - env: - REF_NAME: ${{ github.ref_name }} - - build-nargo: - name: Build nargo binary - if: ${{ needs.check-artifacts-requested.outputs.publish == 'true' }} - runs-on: ubuntu-22.04 - needs: [check-artifacts-requested] - strategy: - matrix: - target: [x86_64-unknown-linux-gnu] - - steps: - - name: Checkout Noir repo - uses: actions/checkout@v4 - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.71.1 - - - uses: Swatinem/rust-cache@v2 - with: - key: ${{ matrix.target }} - cache-on-failure: true - save-if: ${{ github.event_name != 'merge_group' }} - - - name: Build Nargo - run: cargo build --package nargo_cli --release - - - name: 
Package artifacts - run: | - mkdir dist - cp ./target/release/nargo ./dist/nargo - 7z a -ttar -so -an ./dist/* | 7z a -si ./nargo-x86_64-unknown-linux-gnu.tar.gz - - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - name: nargo - path: ./dist/* - retention-days: 3 - - auto-pr-rebuild-script: - name: Rebuild ACIR artifacts - needs: [build-nargo] - runs-on: ubuntu-latest - - steps: - - name: Check out code - uses: actions/checkout@v4 - - - name: Download nargo binary - uses: actions/download-artifact@v3 - with: - name: nargo - path: ./nargo - - - name: Add Nargo to $PATH - run: | - chmod +x ${{ github.workspace }}/nargo/nargo - echo "${{ github.workspace }}/nargo" >> $GITHUB_PATH - - - name: Run rebuild script - working-directory: test_programs - run: | - chmod +x ./rebuild.sh - ./rebuild.sh - - - name: Upload ACIR artifacts - uses: actions/upload-artifact@v3 - with: - name: acir-artifacts - path: ./test_programs/acir_artifacts - retention-days: 10 diff --git a/.github/workflows/docker-test-flow.yml b/.github/workflows/docker-test-flow.yml new file mode 100644 index 00000000000..4b4a2ac2add --- /dev/null +++ b/.github/workflows/docker-test-flow.yml @@ -0,0 +1,751 @@ +name: Test Nargo and JS packages + +on: + push: + branches: + - 'master' + +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + +jobs: + build-base-nargo: + name: Build base nargo docker image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Get current date + id: date + run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE + - name: prepare docker images tags + id: prep + run: | + REGISTRY="ghcr.io" + IMG_RAW="${REGISTRY}/${{ github.repository }}" + IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]') + TAGS="${IMAGE}:${{ github.sha }}-nargo" + FULL_TAGS="${TAGS},${IMAGE}:latest-nargo,${IMAGE}:v${{ steps.date.outputs.date }}-nargo" + echo "tags=$FULL_TAGS" 
>> $GITHUB_OUTPUT + echo "image=$IMAGE" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build nargo base dockerfile + uses: docker/build-push-action@v5 + with: + context: . + file: Dockerfile.ci + tags: ${{ steps.prep.outputs.tags }} + target: base-nargo + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + + build-base-js: + name: Build base js docker image + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Get current date + id: date + run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE + - name: Prepare docker image tags + id: prep + run: | + REGISTRY="ghcr.io" + IMG_RAW="${REGISTRY}/${{ github.repository }}" + IMAGE=$(echo "$IMG_RAW" | tr '[:upper:]' '[:lower:]') + TAGS="${IMAGE}:${{ github.sha }}-js" + FULL_TAGS="${TAGS},${IMAGE}:latest-js,${IMAGE}:v${{ steps.date.outputs.date }}-js" + echo "tags=$FULL_TAGS" >> $GITHUB_OUTPUT + echo "image=$IMAGE" >> $GITHUB_OUTPUT + - name: Set up Docker Buildx + id: buildx + uses: docker/setup-buildx-action@v3 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - name: Build js base dockerfile + uses: docker/build-push-action@v5 + with: + context: . 
+ file: Dockerfile.ci + tags: ${{ steps.prep.outputs.tags }} + target: base-js + cache-from: type=gha + cache-to: type=gha,mode=max + push: true + + artifact-nargo: + name: Artifact nargo + runs-on: ubuntu-latest + needs: [build-base-nargo] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Artifact nargo + uses: actions/upload-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release/nargo + if-no-files-found: error + compression-level: 0 + + test-nargo: + name: Test nargo + runs-on: ubuntu-latest + needs: [build-base-nargo] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-nargo + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Test + working-directory: /usr/src/noir + run: | + .github/scripts/nargo-test.sh + + build-noir-wasm: + name: Build noir wasm + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + retention-days: 10 + + test-noir-wasm: + name: Test noir wasm + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noir-wasm] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: 
Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir_wasm artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-test.sh + + test-noir-wasm-browser: + name: Test noir wasm browser + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noir-wasm] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir_wasm artifact + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-wasm-test-browser.sh + + build-acvm_js: + name: Build acvm js + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: acvm_js + path: + /usr/src/noir/acvm-repo/acvm_js/outputs/out/acvm_js + if-no-files-found: error + compression-level: 0 + + test-acvm_js: + name: Test acvm js + runs-on: ubuntu-latest + needs: [build-base-js, 
build-acvm_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-test.sh + + test-acvm_js-browser: + name: Test acvm js browser + runs-on: ubuntu-latest + needs: [build-base-js, build-acvm_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/acvm_js-test-browser.sh + + build-noirc-abi: + name: Build noirc abi + runs-on: ubuntu-latest + needs: [build-base-js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noirc_abi_wasm + path: + /usr/src/noir/tooling/noirc_abi_wasm/outputs/out/noirc_abi_wasm + if-no-files-found: error + compression-level: 0 + + test-noirc-abi: + name: Test noirc abi + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Test + working-directory: /usr/src/noir + run: | + 
./.github/scripts/noirc-abi-test.sh + + test-noirc-abi-browser: + name: Test noirc abi browser + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noirc-abi-test-browser.sh + + build-noir-js-types: + name: Build noir js types + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-types-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + if-no-files-found: error + compression-level: 0 + + build-barretenberg-backend: + name: Build Barretenberg backend + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-noir-js-types] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: /usr/src/noir/tooling/noir_js_types/lib/ + - name: Build + working-directory: /usr/src/noir + run: | + 
./.github/scripts/backend-barretenberg-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + if-no-files-found: error + compression-level: 0 + + test-barretenberg-backend: + name: Test Barretenberg backend + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-noir-js-types, build-barretenberg-backend] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download artifact + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: /usr/src/noir/tooling/noir_js_types/lib/ + - name: Download Backend barretenberg + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/backend-barretenberg-test.sh + + build-noir_js: + name: Build noirjs + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-barretenberg-backend, build-noir-js-types] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + 
/usr/src/noir/acvm-repo/acvm_js + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-js-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + + test-noir_js: + name: Test noirjs + runs-on: ubuntu-latest + needs: [ + build-base-js, + build-noirc-abi, + artifact-nargo, + build-acvm_js, + build-barretenberg-backend, + build-noir_js, + build-noir-js-types + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: | + /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: | + /usr/src/noir/acvm-repo/acvm_js + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Test + working-directory: /usr/src/noir + run: | + 
./.github/scripts/noir-js-test.sh + + build-noir_codegen: + name: Build noir codegen + runs-on: ubuntu-latest + needs: [build-base-js, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi package + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Build + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-codegen-build.sh + - name: Artifact + uses: actions/upload-artifact@v4 + with: + name: noir_codegen + path: + /usr/src/noir/tooling/noir_codegen/lib + + test-noir_codegen: + name: Test noir codegen + runs-on: ubuntu-latest + needs: [build-base-js, artifact-nargo, build-noirc-abi, build-acvm_js, build-noir-js-types, build-noir_js, build-noir_codegen] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noirc abi + uses: 
actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Download noir codegen + uses: actions/download-artifact@v4 + with: + name: noir_codegen + path: + /usr/src/noir/tooling/noir_codegen/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/noir-codegen-test.sh + + test-integration: + name: Integration test + runs-on: ubuntu-latest + needs: [ + build-base-js, + artifact-nargo, + build-noir-wasm, + build-noirc-abi, + build-acvm_js, + build-noir-js-types, + build-noir_js, + build-barretenberg-backend + ] + container: + image: ghcr.io/noir-lang/noir:${{ github.sha }}-js + credentials: + username: ${{ github.actor }} + password: ${{ secrets.github_token }} + steps: + - name: Download nargo + uses: actions/download-artifact@v4 + with: + name: nargo + path: /usr/src/noir/target/release + - name: Prep downloaded artifact + run: | + chmod +x /usr/src/noir/target/release/nargo + - name: Download noir wasm + uses: actions/download-artifact@v4 + with: + name: noir_wasm + path: /usr/src/noir/compiler/wasm + - name: Download noirc abi + uses: actions/download-artifact@v4 + with: + name: noirc_abi_wasm + path: /usr/src/noir/tooling/noirc_abi_wasm + - name: Download acvm js + uses: actions/download-artifact@v4 + with: + name: acvm_js + path: /usr/src/noir/acvm-repo/acvm_js + - name: Download noir js types + uses: actions/download-artifact@v4 + with: + name: noir-js-types + path: | + /usr/src/noir/tooling/noir_js_types/lib + - name: Download noir js + uses: actions/download-artifact@v4 + 
with: + name: noir_js + path: + /usr/src/noir/tooling/noir_js/lib + - name: Download Barretenberg backend + uses: actions/download-artifact@v4 + with: + name: barretenberg-backend + path: + /usr/src/noir/tooling/noir_js_backend_barretenberg/lib + - name: Test + working-directory: /usr/src/noir + run: | + ./.github/scripts/integration-test.sh + + tests-end: + name: End + runs-on: ubuntu-latest + if: ${{ always() }} + needs: + - test-nargo + - test-noirc-abi + - test-noirc-abi-browser + - test-noir-wasm + - test-noir-wasm-browser + - test-integration + - test-noir_codegen + - test-acvm_js + - test-acvm_js-browser + - test-barretenberg-backend + - test-noir_js + + steps: + - name: Report overall success + run: | + if [[ $FAIL == true ]]; then + exit 1 + else + exit 0 + fi + env: + FAIL: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'skipped') }} diff --git a/.github/workflows/docs-dead-links.yml b/.github/workflows/docs-dead-links.yml new file mode 100644 index 00000000000..40e948fe2c1 --- /dev/null +++ b/.github/workflows/docs-dead-links.yml @@ -0,0 +1,35 @@ +name: Check Markdown links + +on: + schedule: + # Run a check at 9 AM UTC + - cron: "0 9 * * *" + +# This will cancel previous runs when a branch or PR is updated +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} + cancel-in-progress: true + + +jobs: + markdown-link-check: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@master + - uses: gaurav-nelson/github-action-markdown-link-check@v1 + with: + use-quiet-mode: 'yes' + config-file: ./docs/link-check.config.json + folder-path: ./docs + + # Raise an issue if the previous step failed due to dead links being found + - name: Alert on dead links + uses: JasonEtco/create-an-issue@v2 + if: ${{ failure() }} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + WORKFLOW_NAME: ${{ github.workflow }} + WORKFLOW_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ 
github.run_id }} + with: + update_existing: true + filename: .github/DEAD_LINKS_IN_DOCS.md diff --git a/.github/workflows/docs-pr.yml b/.github/workflows/docs-pr.yml index a16487a49ef..f4a1be826a8 100644 --- a/.github/workflows/docs-pr.yml +++ b/.github/workflows/docs-pr.yml @@ -11,7 +11,7 @@ jobs: steps: - name: Check if label is present id: check-labels - uses: actions/github-script@v3 + uses: actions/github-script@v7.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | @@ -21,10 +21,11 @@ jobs: } // Fetch the list of files changed in the PR - const { data: files } = await github.pulls.listFiles({ + const { data: files } = await github.rest.pulls.listFiles({ owner: context.repo.owner, repo: context.repo.repo, - pull_number: context.issue.number + pull_number: context.issue.number, + per_page: 100 }); // Check if any file is within the 'docs' folder @@ -33,13 +34,13 @@ jobs: - name: Add label if not present if: steps.check-labels.outputs.result == 'true' - uses: actions/github-script@v3 + uses: actions/github-script@v7.0.1 with: github-token: ${{ secrets.GITHUB_TOKEN }} script: | const labels = context.payload.pull_request.labels.map(label => label.name); if (!labels.includes('documentation')) { - github.issues.addLabels({ + github.rest.issues.addLabels({ owner: context.repo.owner, repo: context.repo.repo, issue_number: context.issue.number, @@ -47,20 +48,14 @@ jobs: }) } - build_and_deploy_preview: + build_preview: runs-on: ubuntu-latest - permissions: - pull-requests: write - needs: add_label - if: needs.add_label.outputs.has_label == 'true' steps: - name: Checkout code uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: '18' + - name: Install Yarn dependencies + uses: ./.github/actions/setup - name: Install wasm-bindgen-cli uses: taiki-e/install-action@v2 @@ -71,13 +66,34 @@ jobs: run: | npm i wasm-opt -g - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - name: Build docs run: - 
yarn workspaces foreach -Rt run build + yarn workspaces foreach -Rpt --from docs run build + + - name: Upload artifact + uses: actions/upload-artifact@v3 + with: + name: docs + path: ./docs/build/ + retention-days: 3 + + deploy_preview: + needs: [build_preview, add_label] + runs-on: ubuntu-latest + permissions: + pull-requests: write + if: needs.add_label.outputs.has_label == 'true' + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Download built docs + uses: actions/download-artifact@v3 + with: + name: docs + path: ./docs/build + - name: Deploy to Netlify uses: nwtgck/actions-netlify@v2.1 with: diff --git a/.github/workflows/publish-acvm.yml b/.github/workflows/publish-acvm.yml index 06876f27c8d..0251aaa0377 100644 --- a/.github/workflows/publish-acvm.yml +++ b/.github/workflows/publish-acvm.yml @@ -45,15 +45,9 @@ jobs: env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} - - name: Publish barretenberg_blackbox_solver + - name: Publish bn254_blackbox_solver run: | - cargo publish --package barretenberg_blackbox_solver - env: - CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} - - - name: Publish acvm_stdlib - run: | - cargo publish --package acvm_stdlib + cargo publish --package bn254_blackbox_solver env: CARGO_REGISTRY_TOKEN: ${{ secrets.ACVM_CRATES_IO_TOKEN }} diff --git a/.github/workflows/publish-docs.yml b/.github/workflows/publish-docs.yml index 4ef7dd89777..231b57550c9 100644 --- a/.github/workflows/publish-docs.yml +++ b/.github/workflows/publish-docs.yml @@ -1,12 +1,10 @@ name: Publish documentation on: - workflow_dispatch: - inputs: - noir-ref: - description: The noir reference to checkout - required: false - default: 'master' + push: + branches: + - master + paths: [docs/**] jobs: publish-docs: @@ -16,14 +14,9 @@ jobs: steps: - name: Checkout release branch uses: actions/checkout@v4 - with: - ref: ${{ inputs.noir-ref }} - token: ${{ secrets.NOIR_RELEASES_TOKEN }} - - name: Setup Node.js - uses: 
actions/setup-node@v2 - with: - node-version: '18' + - name: Install Yarn dependencies + uses: ./.github/actions/setup - name: Install wasm-bindgen-cli uses: taiki-e/install-action@v2 @@ -34,9 +27,6 @@ jobs: run: | npm i wasm-opt -g - - name: Install Yarn dependencies - uses: ./.github/actions/setup - - name: Build docs for deploying working-directory: docs run: @@ -51,6 +41,7 @@ jobs: github-token: ${{ secrets.GITHUB_TOKEN }} enable-github-deployment: false deploy-message: "Deploy from GitHub Actions for tag ${{ inputs.noir-ref }}" + enable-commit-comment: false env: NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }} NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }} diff --git a/.github/workflows/publish-nargo.yml b/.github/workflows/publish-nargo.yml index 3bcc9a13570..fc089008657 100644 --- a/.github/workflows/publish-nargo.yml +++ b/.github/workflows/publish-nargo.yml @@ -73,16 +73,6 @@ jobs: path: ./dist/* retention-days: 3 - - name: Install Yarn dependencies - if: matrix.target == 'x86_64-apple-darwin' - uses: ./.github/actions/setup - - - name: Test built artifact - if: matrix.target == 'x86_64-apple-darwin' - run: | - cp ./target/${{ matrix.target }}/release/nargo ~/.cargo/bin/ - yarn workspace release-tests test - - name: Upload binaries to release tag uses: svenstaro/upload-release-action@v2 if: ${{ inputs.publish || github.event_name == 'schedule' }} @@ -161,16 +151,6 @@ jobs: path: ./dist/* retention-days: 3 - - name: Install Yarn dependencies - if: startsWith(matrix.target, 'x86_64-unknown-linux') - uses: ./.github/actions/setup - - - name: Test built artifact - if: startsWith(matrix.target, 'x86_64-unknown-linux') - run: | - cp ./target/${{ matrix.target }}/release/nargo ~/.cargo/bin/ - yarn workspace release-tests test - - name: Upload binaries to release tag uses: svenstaro/upload-release-action@v2 if: ${{ inputs.publish }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fcf630345ac..22a733b38c5 100644 --- 
a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -11,6 +11,7 @@ jobs: outputs: release-pr: ${{ steps.release.outputs.pr }} tag-name: ${{ steps.release.outputs.tag_name }} + pending-release-semver: v${{ steps.release.outputs.major }}.${{steps.release.outputs.minor}}.${{steps.release.outputs.patch}} runs-on: ubuntu-latest steps: - name: Run release-please @@ -39,36 +40,19 @@ jobs: run: | ./scripts/update-acvm-workspace-versions.sh - - name: Configure git - run: | - git config user.name kevaundray - git config user.email kevtheappdev@gmail.com - - - name: Commit updates - run: | - git add Cargo.toml - git commit -m 'chore: Update root workspace acvm versions' - git push - - update-lockfile: - name: Update lockfile - needs: [release-please,update-acvm-workspace-package-versions] - if: ${{ needs.release-please.outputs.release-pr }} - runs-on: ubuntu-latest - steps: - - name: Checkout release branch - uses: actions/checkout@v4 - with: - ref: ${{ fromJSON(needs.release-please.outputs.release-pr).headBranchName }} - token: ${{ secrets.NOIR_RELEASES_TOKEN }} - - - name: Setup toolchain - uses: dtolnay/rust-toolchain@1.65.0 - - name: Update lockfile run: | - cargo update --workspace + cargo update --workspace + - uses: actions/setup-node@v3 + with: + node-version: 18.17.1 + cache: 'yarn' + cache-dependency-path: 'yarn.lock' + + - name: Update yarn.lock + run: yarn + - name: Configure git run: | git config user.name kevaundray @@ -76,15 +60,16 @@ jobs: - name: Commit updates run: | - git add Cargo.lock - git commit -m 'chore: Update lockfile' + git add . 
+ git commit -m 'chore: Update root workspace acvm versions and lockfile' git push update-docs: name: Update docs - needs: [release-please, update-lockfile] - if: ${{ needs.release-please.outputs.tag-name }} + needs: [release-please, update-acvm-workspace-package-versions] + if: ${{ needs.release-please.outputs.release-pr }} runs-on: ubuntu-latest + steps: - name: Checkout release branch uses: actions/checkout@v4 @@ -92,17 +77,12 @@ jobs: ref: ${{ fromJSON(needs.release-please.outputs.release-pr).headBranchName }} token: ${{ secrets.NOIR_RELEASES_TOKEN }} - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: '18' - - name: Install Yarn dependencies uses: ./.github/actions/setup - name: Cut a new version working-directory: ./docs - run: yarn docusaurus docs:version ${{ needs.release-please.outputs.tag-name }} + run: yarn docusaurus docs:version ${{ needs.release-please.outputs.pending-release-semver }} - name: Configure git run: | @@ -112,7 +92,7 @@ jobs: - name: Commit new documentation version run: | git add . 
- git commit -m "chore(docs): cut new docs version for tag ${{ needs.release-please.outputs.tag-name }}" + git commit -m "chore(docs): cut new docs version for tag ${{ needs.release-please.outputs.pending-release-semver }}" git push build-binaries: @@ -144,21 +124,6 @@ jobs: token: ${{ secrets.NOIR_REPO_TOKEN }} inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}", "npm-tag": "latest" }' - publish-docs: - name: Publish docs - needs: [release-please] - if: ${{ needs.release-please.outputs.tag-name }} - runs-on: ubuntu-latest - - steps: - - name: Dispatch to publish-docs - uses: benc-uk/workflow-dispatch@v1 - with: - workflow: publish-docs.yml - ref: master - token: ${{ secrets.NOIR_REPO_TOKEN }} - inputs: '{ "noir-ref": "${{ needs.release-please.outputs.tag-name }}" }' - publish-acvm: name: Publish acvm needs: [release-please] diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml index 89fcdd12d12..83d67325775 100644 --- a/.github/workflows/spellcheck.yml +++ b/.github/workflows/spellcheck.yml @@ -8,6 +8,32 @@ concurrency: cancel-in-progress: true jobs: - spellcheck: - name: Spellcheck - uses: noir-lang/.github/.github/workflows/spellcheck.yml@main + code: + name: Code + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Check spelling + uses: streetsidesoftware/cspell-action@v2 + with: + strict: false # Do not fail, if a spelling mistake is found (This can be annoying for contributors) + incremental_files_only: true # Run this action on files which have changed in PR + files: | + **/*.{md,rs} + + docs: + name: Documentation + runs-on: ubuntu-latest + steps: + - name: Checkout sources + uses: actions/checkout@v4 + + - name: Check spelling + uses: streetsidesoftware/cspell-action@v2 + with: + strict: true # Documentation has higher standards for correctness. 
+ incremental_files_only: true # Run this action on files which have changed in PR + files: | + ./docs/**/*.md diff --git a/.github/workflows/test-cargo.yml b/.github/workflows/test-cargo.yml deleted file mode 100644 index 8d414daa75b..00000000000 --- a/.github/workflows/test-cargo.yml +++ /dev/null @@ -1,51 +0,0 @@ -name: Test cargo - -on: - pull_request: - merge_group: - push: - branches: - - master - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - -jobs: - build: - name: Test cargo - runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - name: Get current date - id: date - run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE - - name: prepare docker images tags - id: prep - run: | - REGISTRY="ghcr.io" - IMG="${REGISTRY}/${{ github.repository }}" - IMAGE=$(echo "$IMG" | tr '[:upper:]' '[:lower:]') - TAGS="${IMAGE}:${{ github.sha }}" - TAGS="${TAGS},${IMAGE}:latest,${IMAGE}:v${{ steps.date.outputs.date }}" - echo ::set-output name=tags::${TAGS} - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Test cargo - uses: docker/build-push-action@v5 - with: - context: . 
- file: Dockerfile.ci - tags: ${{ steps.prep.outputs.tags }} - target: test-cargo - cache-from: type=gha - cache-to: type=gha,mode=max \ No newline at end of file diff --git a/.github/workflows/test-js-packages.yml b/.github/workflows/test-js-packages.yml index a298d67a485..addc9ce3d83 100644 --- a/.github/workflows/test-js-packages.yml +++ b/.github/workflows/test-js-packages.yml @@ -47,6 +47,7 @@ jobs: retention-days: 3 build-noir-wasm: + needs: [build-noirc-abi] runs-on: ubuntu-latest timeout-minutes: 30 @@ -54,28 +55,36 @@ jobs: - name: Checkout sources uses: actions/checkout@v4 - - name: Setup Nix - uses: ./.github/actions/nix + - name: Setup toolchain + uses: dtolnay/rust-toolchain@1.71.1 + + - uses: Swatinem/rust-cache@v2 with: - github-token: ${{ secrets.GITHUB_TOKEN }} - nix-cache-name: "noir" - cachix-auth-token: ${{ secrets.CACHIXAUTHTOKEN }} + key: noir-wasm + cache-on-failure: true + save-if: ${{ github.event_name != 'merge_group' }} - - name: Build wasm package - run: | - nix build -L .#noir_wasm + - name: Download noirc_abi_wasm package artifact + uses: actions/download-artifact@v3 + with: + name: noirc_abi_wasm + path: ./tooling/noirc_abi_wasm - - name: Dereference symlink - run: echo "UPLOAD_PATH=$(readlink -f ./result/noir_wasm)" >> $GITHUB_ENV + - name: Install Yarn dependencies + uses: ./.github/actions/setup + + - name: Build noir_wasm + run: yarn workspace @noir-lang/noir_wasm build - name: Upload artifact uses: actions/upload-artifact@v3 with: name: noir_wasm - path: ${{ env.UPLOAD_PATH }} + path: | + ./compiler/wasm/dist + ./compiler/wasm/build retention-days: 3 - build-acvm-js: runs-on: ubuntu-latest timeout-minutes: 30 @@ -289,25 +298,24 @@ jobs: name: noir_wasm path: ./compiler/wasm + - name: Install Yarn dependencies + uses: ./.github/actions/setup + - name: Download nargo binary uses: actions/download-artifact@v3 with: name: nargo path: ./nargo - - name: Compile fixtures with Nargo CLI - working-directory: ./compiler/wasm/fixtures + - 
name: Set nargo on PATH run: | - nargo_binary=${{ github.workspace }}/nargo/nargo + nargo_binary="${{ github.workspace }}/nargo/nargo" chmod +x $nargo_binary - for dir in $(ls -d */); do - pushd $dir/noir-script - $nargo_binary compile - popd - done + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" - - name: Install Yarn dependencies - uses: ./.github/actions/setup + - name: Build fixtures + run: yarn workspace @noir-lang/noir_wasm test:build_fixtures - name: Install Playwright uses: ./.github/actions/install-playwright @@ -319,7 +327,7 @@ jobs: run: yarn workspace @noir-lang/noir_wasm test:browser test-noir-codegen: - needs: [build-acvm-js, build-noirc-abi] + needs: [build-acvm-js, build-noirc-abi, build-nargo] name: noir_codegen runs-on: ubuntu-latest timeout-minutes: 30 @@ -328,6 +336,12 @@ jobs: - name: Checkout uses: actions/checkout@v4 + - name: Download nargo binary + uses: actions/download-artifact@v3 + with: + name: nargo + path: ./nargo + - name: Download acvm_js package artifact uses: actions/download-artifact@v3 with: @@ -339,6 +353,14 @@ jobs: with: name: noirc_abi_wasm path: ./tooling/noirc_abi_wasm + + - name: Set nargo on PATH + run: | + nargo_binary="${{ github.workspace }}/nargo/nargo" + chmod +x $nargo_binary + echo "$(dirname $nargo_binary)" >> $GITHUB_PATH + export PATH="$PATH:$(dirname $nargo_binary)" + nargo -V - name: Install Yarn dependencies uses: ./.github/actions/setup diff --git a/.github/workflows/test-js.yml b/.github/workflows/test-js.yml deleted file mode 100644 index 2f29e027156..00000000000 --- a/.github/workflows/test-js.yml +++ /dev/null @@ -1,52 +0,0 @@ -name: Test JS packages - -on: - pull_request: - merge_group: - push: - branches: - - master - -# This will cancel previous runs when a branch or PR is updated -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.ref || github.run_id }} - cancel-in-progress: true - - -jobs: - build: - name: Test JS packages - 
runs-on: ubuntu-latest - steps: - - name: Checkout code - uses: actions/checkout@v4 - - name: Get current date - id: date - run: echo "date=$(date +'%Y.%m.%d.%H.%M')" >> $GITHUB_STATE - - name: prepare docker images tags - id: prep - run: | - REGISTRY="ghcr.io" - IMG="${REGISTRY}/${{ github.repository }}" - IMAGE=$(echo "$IMG" | tr '[:upper:]' '[:lower:]') - TAGS="${IMAGE}:${{ github.sha }}" - TAGS="${TAGS},${IMAGE}:latest,${IMAGE}:v${{ steps.date.outputs.date }}" - echo ::set-output name=tags::${TAGS} - - name: Set up Docker Buildx - id: buildx - uses: docker/setup-buildx-action@v3 - - name: Login to GitHub Container Registry - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - name: Test JS packages - uses: docker/build-push-action@v5 - with: - context: . - file: Dockerfile.ci - tags: ${{ steps.prep.outputs.tags }} - target: test-js - cache-from: type=gha - cache-to: type=gha,mode=max \ No newline at end of file diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9e3193c22fb..f440a7a2c51 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,4 +1,4 @@ { - ".": "0.20.0", - "acvm-repo": "0.37.0" + ".": "0.23.0", + "acvm-repo": "0.39.0" } \ No newline at end of file diff --git a/.yarnrc.yml b/.yarnrc.yml index fd534a48781..6d27afaac27 100644 --- a/.yarnrc.yml +++ b/.yarnrc.yml @@ -7,3 +7,6 @@ plugins: spec: "@yarnpkg/plugin-workspace-tools" yarnPath: .yarn/releases/yarn-3.6.3.cjs +logFilters: + - code: YN0013 + level: discard diff --git a/CHANGELOG.md b/CHANGELOG.md index 9abb97f6860..af7eb5b2f19 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,174 @@ # Changelog +## [0.23.0](https://github.com/noir-lang/noir/compare/v0.22.0...v0.23.0) (2024-01-22) + + +### ⚠ BREAKING CHANGES + +* Ban nested slices ([#4018](https://github.com/noir-lang/noir/issues/4018)) +* Breaking changes from aztec-packages 
([#3955](https://github.com/noir-lang/noir/issues/3955)) +* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) +* remove circuit methods from noir_wasm ([#3869](https://github.com/noir-lang/noir/issues/3869)) + +### Features + +* Add `assert_max_bit_size` method to `Field` ([#4016](https://github.com/noir-lang/noir/issues/4016)) ([bc9a44f](https://github.com/noir-lang/noir/commit/bc9a44f285e0569825a307b06ee8acd93461c87e)) +* Add `noir-compiler` checks to `aztec_macros` ([#4031](https://github.com/noir-lang/noir/issues/4031)) ([420a5c7](https://github.com/noir-lang/noir/commit/420a5c74a14dcfeede04337a42282093a7b5e63e)) +* Add a `--force` flag to force a full recompile ([#4054](https://github.com/noir-lang/noir/issues/4054)) ([27a8e68](https://github.com/noir-lang/noir/commit/27a8e6864643d81d96e84990e2e26cd16596a695)) +* Add dependency resolver for `noir_wasm` and implement `FileManager` for consistency with native interface ([#3891](https://github.com/noir-lang/noir/issues/3891)) ([c29c7d7](https://github.com/noir-lang/noir/commit/c29c7d7c9615b9f45c696b1bdc1c497d55469dfa)) +* Add foreign call support to `noir_codegen` functions ([#3933](https://github.com/noir-lang/noir/issues/3933)) ([e5e52a8](https://github.com/noir-lang/noir/commit/e5e52a81b31d7735b680e97a9bef89a010a99763)) +* Add MVP `nargo export` command ([#3870](https://github.com/noir-lang/noir/issues/3870)) ([fbb51ed](https://github.com/noir-lang/noir/commit/fbb51ed33e9e4d9105d8946cdfc4ea387c85258e)) +* Add support for codegenning multiple functions which use the same structs in their interface ([#3868](https://github.com/noir-lang/noir/issues/3868)) ([1dcfcc5](https://github.com/noir-lang/noir/commit/1dcfcc5265f618685a783504b1d4be213e4cda2d)) +* Added efficient field comparisons for bn254 ([#4042](https://github.com/noir-lang/noir/issues/4042)) ([1f9cad0](https://github.com/noir-lang/noir/commit/1f9cad00c57ea257f57419d2446a46938beb19f9)) +* Assert maximum bit 
size when creating a U128 from an integer ([#4024](https://github.com/noir-lang/noir/issues/4024)) ([8f9c7e4](https://github.com/noir-lang/noir/commit/8f9c7e4de9f2ae5b39714d8e0d26b2befcd11c4a)) +* Avoid unnecessary range checks by inspecting instructions for casts ([#4039](https://github.com/noir-lang/noir/issues/4039)) ([378c18e](https://github.com/noir-lang/noir/commit/378c18eb42d75852b97f849d05c9e3f650601339)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c)) +* Bubble up `Instruction::Constrain`s to be applied as early as possible. ([#4065](https://github.com/noir-lang/noir/issues/4065)) ([66f5cdd](https://github.com/noir-lang/noir/commit/66f5cddc133ba0311028eba96c0ff6ec2ecaee59)) +* Cached LSP parsing ([#4083](https://github.com/noir-lang/noir/issues/4083)) ([b4f724e](https://github.com/noir-lang/noir/commit/b4f724e848b291a733e417c394ac3fc7649c08c5)) +* Comparison for signed integers ([#3873](https://github.com/noir-lang/noir/issues/3873)) ([bcbd49b](https://github.com/noir-lang/noir/commit/bcbd49b8b44749e149f83c1240094fa2f0a19087)) +* Decompose `Instruction::Cast` to have an explicit truncation instruction ([#3946](https://github.com/noir-lang/noir/issues/3946)) ([35f18ef](https://github.com/noir-lang/noir/commit/35f18ef4d7c8041e3cf622a5643748d0793c2aa6)) +* Decompose `Instruction::Constrain` into multiple more basic constraints ([#3892](https://github.com/noir-lang/noir/issues/3892)) ([51cf9d3](https://github.com/noir-lang/noir/commit/51cf9d37c8b9fbb14bb54b178d93129a7563e131)) +* Docker testing flow ([#3895](https://github.com/noir-lang/noir/issues/3895)) ([179c90d](https://github.com/noir-lang/noir/commit/179c90dc3263c85de105c57925d9c5894427e8e1)) +* Extract parsing to its own pass and do it in parallel ([#4063](https://github.com/noir-lang/noir/issues/4063)) 
([569cbbc](https://github.com/noir-lang/noir/commit/569cbbc231a242c32821cba56f3649f3228a1cc7)) +* Implement `Eq` trait on curve points ([#3944](https://github.com/noir-lang/noir/issues/3944)) ([abf751a](https://github.com/noir-lang/noir/commit/abf751ab7f57f87520be16b2bc6168efdf95a430)) +* Implement DAP protocol in Nargo ([#3627](https://github.com/noir-lang/noir/issues/3627)) ([13834d4](https://github.com/noir-lang/noir/commit/13834d43bd876909cb50494a41b42297f7e6375b)) +* Implement generic traits ([#4000](https://github.com/noir-lang/noir/issues/4000)) ([916fd15](https://github.com/noir-lang/noir/commit/916fd158aa361ac80d32767f575ad896c3462b15)) +* Implement Operator Overloading ([#3931](https://github.com/noir-lang/noir/issues/3931)) ([4b16090](https://github.com/noir-lang/noir/commit/4b16090beecd0fcdd41c9e7b8f615c4625c26a5b)) +* **lsp:** Cache definitions for goto requests ([#3930](https://github.com/noir-lang/noir/issues/3930)) ([4a2140f](https://github.com/noir-lang/noir/commit/4a2140f1f36bbe3afbc006f8db74820308ae27d5)) +* **lsp:** Goto global ([#4043](https://github.com/noir-lang/noir/issues/4043)) ([15237b3](https://github.com/noir-lang/noir/commit/15237b34dbce5ea54973a178449e67cca8ac4f9d)) +* **lsp:** Goto struct member inside Impl method ([#3918](https://github.com/noir-lang/noir/issues/3918)) ([99c2c5a](https://github.com/noir-lang/noir/commit/99c2c5a2c2c0da6bad783b60d9e3de8d9a1f4ee4)) +* **lsp:** Goto trait from trait impl ([#3956](https://github.com/noir-lang/noir/issues/3956)) ([eb566e2](https://github.com/noir-lang/noir/commit/eb566e2125e847a3e3efbd2bc15a88a1c454a7df)) +* **lsp:** Goto trait method declaration ([#3991](https://github.com/noir-lang/noir/issues/3991)) ([eb79166](https://github.com/noir-lang/noir/commit/eb79166f7d2b7aa45c9c6c0aa37db1c0a5dfa00f)) +* **lsp:** Goto type alias ([#4061](https://github.com/noir-lang/noir/issues/4061)) ([dc83385](https://github.com/noir-lang/noir/commit/dc83385e9fe5766cd8218265be38c54243cae76e)) +* **lsp:** Goto 
type definition ([#4029](https://github.com/noir-lang/noir/issues/4029)) ([8bb4ddf](https://github.com/noir-lang/noir/commit/8bb4ddfdd81d491ff713a056a7eae522f329d173)) +* **lsp:** Re-add code lens feature with improved performance ([#3829](https://github.com/noir-lang/noir/issues/3829)) ([8f5cd6c](https://github.com/noir-lang/noir/commit/8f5cd6c0b641b3970bf626e8910b2a4c7cc8c310)) +* Optimize array ops for arrays of structs ([#4027](https://github.com/noir-lang/noir/issues/4027)) ([c9ec0d8](https://github.com/noir-lang/noir/commit/c9ec0d811ddc8653201ed765b51585a7c1b946fb)) +* Optimize logic gate ACIR-gen ([#3897](https://github.com/noir-lang/noir/issues/3897)) ([926460a](https://github.com/noir-lang/noir/commit/926460a0c70e21e2f4720148cf424e44ab9b0678)) +* Prefer `AcirContext`-native methods for performing logic operations ([#3898](https://github.com/noir-lang/noir/issues/3898)) ([0ec39b8](https://github.com/noir-lang/noir/commit/0ec39b8396084ed1e7f20609c8ad8a5844a86674)) +* Remove range constraints from witnesses which are constrained to be constants ([#3928](https://github.com/noir-lang/noir/issues/3928)) ([afe9c7a](https://github.com/noir-lang/noir/commit/afe9c7a38bb9d4245205d3aa46d4ce23d70a5671)) +* Remove truncation from brillig casts ([#3997](https://github.com/noir-lang/noir/issues/3997)) ([857ff97](https://github.com/noir-lang/noir/commit/857ff97b196174a0999f0fe7e387bfca5c3b7cd3)) +* Remove truncations which can be seen to be noops using type information ([#3953](https://github.com/noir-lang/noir/issues/3953)) ([cc3c2c2](https://github.com/noir-lang/noir/commit/cc3c2c22644f0b5d8369bad2362ea6e9112a0713)) +* Remove unnecessary predicate from `Lt` instruction ([#3922](https://github.com/noir-lang/noir/issues/3922)) ([a63433f](https://github.com/noir-lang/noir/commit/a63433fb8747722ec3cf2c6eb85d34e5b04bc15c)) +* Simplify chains of casts to be all in terms of the original `ValueId` ([#3984](https://github.com/noir-lang/noir/issues/3984)) 
([2384d3e](https://github.com/noir-lang/noir/commit/2384d3e97af24a8718fbf57f6b276a5ce1de06fe)) +* Simplify multiplications by `0` or `1` in ACIR gen ([#3924](https://github.com/noir-lang/noir/issues/3924)) ([e58844d](https://github.com/noir-lang/noir/commit/e58844daf9f040626a3a7595f8c4f831e48a4037)) +* Support for u128 ([#3913](https://github.com/noir-lang/noir/issues/3913)) ([b4911dc](https://github.com/noir-lang/noir/commit/b4911dcf676f0925ac631ba6f60fc9c4945b2fee)) +* Support printing more types ([#4071](https://github.com/noir-lang/noir/issues/4071)) ([f5c4632](https://github.com/noir-lang/noir/commit/f5c4632e174beba508e1e31d0e2ae3f6d028ae2c)) +* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c)) +* Sync commits from `aztec-packages` ([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc)) +* Use singleton `WasmBlackBoxFunctionSolver` in `noir_js` ([#3966](https://github.com/noir-lang/noir/issues/3966)) ([10b28de](https://github.com/noir-lang/noir/commit/10b28def4d74822b7af2c19a1cc693788272b00b)) + + +### Bug Fixes + +* Acir gen doesn't panic on unsupported BB function ([#3866](https://github.com/noir-lang/noir/issues/3866)) ([34fd978](https://github.com/noir-lang/noir/commit/34fd978d206789a9e9f5167bfd690a34386834d0)) +* Allow abi encoding arrays of structs from JS ([#3867](https://github.com/noir-lang/noir/issues/3867)) ([9b713f8](https://github.com/noir-lang/noir/commit/9b713f8cf599df262a12ec1098136c50b2b46766)) +* Allow abi encoding tuples from JS ([#3894](https://github.com/noir-lang/noir/issues/3894)) ([f7fa181](https://github.com/noir-lang/noir/commit/f7fa1811ad2591020c914976f26e2f11a91cd177)) +* Allow ast when macro errors ([#4005](https://github.com/noir-lang/noir/issues/4005)) 
([efccec3](https://github.com/noir-lang/noir/commit/efccec3c24eb093fba99b1c29f01a78aae5776d0)) +* Allow lsp to run inside of a docker container ([#3876](https://github.com/noir-lang/noir/issues/3876)) ([2529977](https://github.com/noir-lang/noir/commit/2529977acd684219f57ef086415557cc07af043b)) +* Bit-shifts for signed integers ([#3890](https://github.com/noir-lang/noir/issues/3890)) ([6ddd98a](https://github.com/noir-lang/noir/commit/6ddd98ab7d3fefde491cf12b785f76bf0585609e)) +* Checks for cyclic dependencies ([#3699](https://github.com/noir-lang/noir/issues/3699)) ([642011a](https://github.com/noir-lang/noir/commit/642011ab6ebbe8f012eda1da1abbf8660500723d)) +* **debugger:** Crash when stepping through locations spanning multiple lines ([#3920](https://github.com/noir-lang/noir/issues/3920)) ([223e860](https://github.com/noir-lang/noir/commit/223e860975c2698bd5043340b937de74552ec15b)) +* Don't fail if no tests and the user didn't provide a pattern ([#3864](https://github.com/noir-lang/noir/issues/3864)) ([decbd0f](https://github.com/noir-lang/noir/commit/decbd0f0c019844cd2b235e7804d2f6ba7b23897)) +* Fix advisory issue in cargo-deny ([#4077](https://github.com/noir-lang/noir/issues/4077)) ([19baea0](https://github.com/noir-lang/noir/commit/19baea0d18e2d26bd04b649f79dd8e681488d1dc)) +* Fixing dark mode background on the CTA button ([#3882](https://github.com/noir-lang/noir/issues/3882)) ([57eae42](https://github.com/noir-lang/noir/commit/57eae42080d6a928e8010c6bc77489964a5777ef)) +* Fixup exports from `noir_wasm` ([#4022](https://github.com/noir-lang/noir/issues/4022)) ([358cdd2](https://github.com/noir-lang/noir/commit/358cdd2725444091b3322c47754e3cbd9b1d3614)) +* Handle multiple imports in the same file ([#3903](https://github.com/noir-lang/noir/issues/3903)) ([219423e](https://github.com/noir-lang/noir/commit/219423eb87fa12bd8cca2a6fd2ce4c06e308783c)) +* Hoist constraints on inputs to top of program ([#4076](https://github.com/noir-lang/noir/issues/4076)) 
([447aa34](https://github.com/noir-lang/noir/commit/447aa343555cbd5a7cd735876e08f43271ecdd40)) +* Implement missing codegen for `BlackBoxFunc::EcdsaSecp256r1` in brillig ([#3943](https://github.com/noir-lang/noir/issues/3943)) ([2c5eceb](https://github.com/noir-lang/noir/commit/2c5eceb04ab6bc38e954492642121c7fe3da866f)) +* Improve `nargo test` output ([#3973](https://github.com/noir-lang/noir/issues/3973)) ([3ab5ff4](https://github.com/noir-lang/noir/commit/3ab5ff431145a1f747b698caed15caebaa145f04)) +* Make `constant_to_radix` emit a slice instead of an array ([#4049](https://github.com/noir-lang/noir/issues/4049)) ([5cdb1d0](https://github.com/noir-lang/noir/commit/5cdb1d0dabe2e38a1610f718747cc2fb4263339d)) +* Operator overloading & static trait method references resolving to generic impls ([#3967](https://github.com/noir-lang/noir/issues/3967)) ([f1de8fa](https://github.com/noir-lang/noir/commit/f1de8fa3247bcee624bcd7a0f89fe7c7cd8430f1)) +* Preserve brillig entrypoint functions without arguments ([#3951](https://github.com/noir-lang/noir/issues/3951)) ([1111465](https://github.com/noir-lang/noir/commit/1111465551557ed9e97e4b43d6eccc4b5896a39f)) +* Prevent `Instruction::Constrain`s for non-primitive types ([#3916](https://github.com/noir-lang/noir/issues/3916)) ([467948f](https://github.com/noir-lang/noir/commit/467948f9ee9ae65b4e2badaa1d15835fced3e835)) +* Remove panic for adding an invalid crate name in wasm compiler ([#3977](https://github.com/noir-lang/noir/issues/3977)) ([7a1baa5](https://github.com/noir-lang/noir/commit/7a1baa56faa2deb385ef1b6c9da9073dafd5a376)) +* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925)) +* Search all levels of struct nesting before codegenning primitive types ([#3970](https://github.com/noir-lang/noir/issues/3970)) 
([13ae014](https://github.com/noir-lang/noir/commit/13ae014ddcbd9eddb401c563b95053f7a1a89f1c)) +* Update generics docs to mention we have traits now ([#3980](https://github.com/noir-lang/noir/issues/3980)) ([c2acdf1](https://github.com/noir-lang/noir/commit/c2acdf1793a67abc9a074457e057a44da3b82c39)) + + +### Miscellaneous Chores + +* Ban nested slices ([#4018](https://github.com/noir-lang/noir/issues/4018)) ([f8a1fb7](https://github.com/noir-lang/noir/commit/f8a1fb7eed1ae4a9779eb16b142a64094aa603c6)) +* Remove circuit methods from noir_wasm ([#3869](https://github.com/noir-lang/noir/issues/3869)) ([12d884e](https://github.com/noir-lang/noir/commit/12d884e2b74efab7257626d8878ea1a7455ecf85)) +* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) ([836f171](https://github.com/noir-lang/noir/commit/836f17145c2901060706294461c2d282dd121b3e)) + +## [0.22.0](https://github.com/noir-lang/noir/compare/v0.21.0...v0.22.0) (2023-12-18) + + +### ⚠ BREAKING CHANGES + +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) +* Remove backend field from artifacts ([#3819](https://github.com/noir-lang/noir/issues/3819)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) + +### Features + +* Add context-centric based API for noir_wasm ([#3798](https://github.com/noir-lang/noir/issues/3798)) ([19155d0](https://github.com/noir-lang/noir/commit/19155d02a1248c85e94f14a2a0bb383a4edeb16f)) + + +### Miscellaneous Chores + +* Remove backend field from artifacts ([#3819](https://github.com/noir-lang/noir/issues/3819)) ([fa1cf5f](https://github.com/noir-lang/noir/commit/fa1cf5f03aa21b001c31ebb9ce405e3c2859bb57)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805)) +* Remove unused methods on ACIR opcodes 
([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579)) + +## [0.21.0](https://github.com/noir-lang/noir/compare/v0.20.0...v0.21.0) (2023-12-15) + + +### ⚠ BREAKING CHANGES + +* remove unused `source-resolver` package ([#3791](https://github.com/noir-lang/noir/issues/3791)) +* Make file manager read-only to the compiler ([#3760](https://github.com/noir-lang/noir/issues/3760)) + +### Features + +* Add `prelude.nr` ([#3693](https://github.com/noir-lang/noir/issues/3693)) ([5f0f81f](https://github.com/noir-lang/noir/commit/5f0f81f7f49b021880e0bff648aa6c6d0fede46c)) +* Add some traits to the stdlib ([#3796](https://github.com/noir-lang/noir/issues/3796)) ([8e11352](https://github.com/noir-lang/noir/commit/8e113526a2d78d27ed4e489f16d5604a2aaa18ea)) +* Add support for writing tracing debug info to file ([#3790](https://github.com/noir-lang/noir/issues/3790)) ([98a5004](https://github.com/noir-lang/noir/commit/98a500436a68652a367ccbf77e32f8544aff73bc)) +* Allow passing custom foreign call handlers when creating proofs in NoirJS ([#3764](https://github.com/noir-lang/noir/issues/3764)) ([6076e08](https://github.com/noir-lang/noir/commit/6076e08a0814bb6f3836af3c65a7b40c066b9494)) +* Allow underscores in integer literals ([#3746](https://github.com/noir-lang/noir/issues/3746)) ([2c06a64](https://github.com/noir-lang/noir/commit/2c06a64e502bac6839375c5636d39a172a609a5f)) +* Avoid overflow checks on boolean multiplication ([#3745](https://github.com/noir-lang/noir/issues/3745)) ([9b5b686](https://github.com/noir-lang/noir/commit/9b5b6861c3aa0e154e17598ac9994d3970f0e752)) +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Dockerfile to test cargo and JS packages ([#3684](https://github.com/noir-lang/noir/issues/3684)) 
([513d619](https://github.com/noir-lang/noir/commit/513d6196a0766082a3c88a4050498bae2cfa7e13)) +* Docs landing page with a playground ([#3667](https://github.com/noir-lang/noir/issues/3667)) ([9a95fbe](https://github.com/noir-lang/noir/commit/9a95fbeefb2ecd5a898006530a1e054cd345bfe8)) +* Enhance test information output ([#3696](https://github.com/noir-lang/noir/issues/3696)) ([468fbbc](https://github.com/noir-lang/noir/commit/468fbbca43e33b23bc662bf1d36dcb79830a291c)) +* Implement print without newline ([#3650](https://github.com/noir-lang/noir/issues/3650)) ([9827dfe](https://github.com/noir-lang/noir/commit/9827dfe51118ba55da6da51ab8bf45cffd2ca756)) +* **lsp:** Add goto definition for locals ([#3705](https://github.com/noir-lang/noir/issues/3705)) ([9dd465c](https://github.com/noir-lang/noir/commit/9dd465c23e286481fa9a35632d133901f86d5883)) +* **lsp:** Add goto definition for structs ([#3718](https://github.com/noir-lang/noir/issues/3718)) ([a576c5b](https://github.com/noir-lang/noir/commit/a576c5bba6ab92eb4798715a43475808ac954fba)) +* Optimize out unnecessary truncation instructions ([#3717](https://github.com/noir-lang/noir/issues/3717)) ([c9c72ae](https://github.com/noir-lang/noir/commit/c9c72ae7b80aa9504a082dd083b19d4b80d954c5)) +* Remove experimental feature warning for traits ([#3783](https://github.com/noir-lang/noir/issues/3783)) ([cb52242](https://github.com/noir-lang/noir/commit/cb522429592477c2b0544f3b3026a1a946b0e5b1)) +* Reorganizing docs to fit diataxis framework ([#3711](https://github.com/noir-lang/noir/issues/3711)) ([54a1ed5](https://github.com/noir-lang/noir/commit/54a1ed58c991eefa7ac9304b894c7046c294487b)) +* Simplify explicit equality assertions to assert equality directly ([#3708](https://github.com/noir-lang/noir/issues/3708)) ([2fc46e2](https://github.com/noir-lang/noir/commit/2fc46e2269bba8d9ad6ae5fcea10e64dce9b3745)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) 
([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) + + +### Bug Fixes + +* `try_unify` no longer binds types on failure ([#3697](https://github.com/noir-lang/noir/issues/3697)) ([f03e581](https://github.com/noir-lang/noir/commit/f03e5812439bdf9d1aedc69debdc50ba5dba2049)) +* Add missing assertion to test ([#3765](https://github.com/noir-lang/noir/issues/3765)) ([bcbe116](https://github.com/noir-lang/noir/commit/bcbe11613b7205476a49ad0d588b868b4fc43ba1)) +* Add negative integer literals ([#3690](https://github.com/noir-lang/noir/issues/3690)) ([8b3a68f](https://github.com/noir-lang/noir/commit/8b3a68f5286c09e1f612dbcfff3fe41023ab7109)) +* Allow trait method references from the trait name ([#3774](https://github.com/noir-lang/noir/issues/3774)) ([cfa34d4](https://github.com/noir-lang/noir/commit/cfa34d4d913dbd35f8329430e0d58830e069d6ff)) +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) +* **docs:** Trigger `update-docs` workflow when the `release-please` PR gets merged and not on every merge to master ([#3677](https://github.com/noir-lang/noir/issues/3677)) ([9a3d1d2](https://github.com/noir-lang/noir/commit/9a3d1d2cf647cd583344f8da122fed1acbca9397)) +* Initialize strings as u8 array ([#3682](https://github.com/noir-lang/noir/issues/3682)) ([8da40b7](https://github.com/noir-lang/noir/commit/8da40b75a36ebac51d5377311db3c55fa339dcac)) +* **lsp:** Package resolution on save ([#3794](https://github.com/noir-lang/noir/issues/3794)) ([14f2fff](https://github.com/noir-lang/noir/commit/14f2fffeb3de5f653c11694ee3c5e5d62aaa34ec)) +* Parse negative integer literals ([#3698](https://github.com/noir-lang/noir/issues/3698)) ([463ab06](https://github.com/noir-lang/noir/commit/463ab060075db1915127c3f6cef11bfed9d40109)) +* Pub is required on return for entry points 
([#3616](https://github.com/noir-lang/noir/issues/3616)) ([7f1d796](https://github.com/noir-lang/noir/commit/7f1d7968368734e02b152e2e907dc7af9e1604c8)) +* Remove `noirc_driver/aztec` feature flag in docker ([#3784](https://github.com/noir-lang/noir/issues/3784)) ([a48d562](https://github.com/noir-lang/noir/commit/a48d562b59aa2009a9c9b65dd71e11cdd8d06cf0)) +* Remove include-keys option ([#3692](https://github.com/noir-lang/noir/issues/3692)) ([95d7ce2](https://github.com/noir-lang/noir/commit/95d7ce21016e3603bf279efb970536ad32d89a3a)) +* Revert change to modify version in workspace file for acvm dependencies ([#3673](https://github.com/noir-lang/noir/issues/3673)) ([0696f75](https://github.com/noir-lang/noir/commit/0696f755364293bcc7ebc7a0def0dcafede2e543)) +* Sequence update-lockfile workflow so it gets modified after the ACVM version in the root has been changed ([#3676](https://github.com/noir-lang/noir/issues/3676)) ([c00cd85](https://github.com/noir-lang/noir/commit/c00cd8537836f8e4d8559b01d16dfdd1b5cad519)) +* **ssa:** Handle array arguments to side effectual constrain statements ([#3740](https://github.com/noir-lang/noir/issues/3740)) ([028d65e](https://github.com/noir-lang/noir/commit/028d65ea71f9c11e69784d06e0f9768668455f83)) +* Stop cloning Traits! 
([#3736](https://github.com/noir-lang/noir/issues/3736)) ([fcff412](https://github.com/noir-lang/noir/commit/fcff412bb39a04a5c88506ae5a5ee2fbdefd93ef)) +* Stop issuing unused variable warnings for variables in trait definitions ([#3797](https://github.com/noir-lang/noir/issues/3797)) ([0bb44c3](https://github.com/noir-lang/noir/commit/0bb44c3bbc63d385d77d93da6abd07214bcfd700)) +* Unsigned integers cannot be negated ([#3688](https://github.com/noir-lang/noir/issues/3688)) ([f904ae1](https://github.com/noir-lang/noir/commit/f904ae1065af74652b2111ea17b72f994de37472)) + + +### Miscellaneous Chores + +* Make file manager read-only to the compiler ([#3760](https://github.com/noir-lang/noir/issues/3760)) ([e3dcc21](https://github.com/noir-lang/noir/commit/e3dcc21cb2c0fef7f28f50b018747c4f09609b11)) +* Remove unused `source-resolver` package ([#3791](https://github.com/noir-lang/noir/issues/3791)) ([57d2505](https://github.com/noir-lang/noir/commit/57d2505d53e2233becd1e2a7de882c4acb518eff)) + ## [0.20.0](https://github.com/noir-lang/noir/compare/v0.19.5...v0.20.0) (2023-12-01) @@ -1381,7 +1550,7 @@ * **acvm:** Update to acvm 0.4.1 ([#779](https://github.com/noir-lang/noir/issues/779)) ([6f57e86](https://github.com/noir-lang/noir/commit/6f57e86c3d51191aa516a3b9315337b925810433)) * **ci:** Add concurrency group for rust workflow ([#806](https://github.com/noir-lang/noir/issues/806)) ([1b80f55](https://github.com/noir-lang/noir/commit/1b80f559599c2a7d7b8697f42f63db8e59d318c5)) -* **ci:** Add concurreny group for rust workflow ([1b80f55](https://github.com/noir-lang/noir/commit/1b80f559599c2a7d7b8697f42f63db8e59d318c5)) +* **ci:** Add concurrency group for rust workflow ([1b80f55](https://github.com/noir-lang/noir/commit/1b80f559599c2a7d7b8697f42f63db8e59d318c5)) * **ci:** Build binaries when a release is made ([#773](https://github.com/noir-lang/noir/issues/773)) ([a0c0c2c](https://github.com/noir-lang/noir/commit/a0c0c2c354b50c80eba425ba2f8c235015696c35)) * Impls with 
generics ([#798](https://github.com/noir-lang/noir/issues/798)) ([bea735d](https://github.com/noir-lang/noir/commit/bea735d98e162f42df5957781638101c1e6c75f6)) * **nargo:** add flag to verify created proofs ([#737](https://github.com/noir-lang/noir/issues/737)) ([e981c7c](https://github.com/noir-lang/noir/commit/e981c7ca0ab23073339869a7d45c04ae10fe1adf)) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9cbbeeb677f..d2553b003f8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -92,6 +92,10 @@ This strategy avoids scenarios where pull requests grow too large/out-of-scope a The easiest way to do this is to have multiple Conventional Commits while you work and then you can cherry-pick the smaller changes into separate branches for pull requesting. +### Typos and other small changes + +Significant changes, like new features or important bug fixes, typically have a more pronounced impact on the project’s overall development. For smaller fixes, such as typos, we encourage you to report them instead of opening PRs. This approach helps us manage our resources effectively and ensures that every change contributes meaningfully to the project. PRs involving such smaller fixes will likely be closed and incorporated in PRs authored by the core team. + ### Reviews For any repository in the noir-lang organization, we require code review & approval by __one__ Noir team member before the changes are merged, as enforced by GitHub branch protection. Non-breaking pull requests may be merged at any time. Breaking pull requests should only be merged when the team has general agreement of the changes and is preparing a breaking release. 
diff --git a/Cargo.lock b/Cargo.lock index 53b0f7970ef..8687fe07aea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,10 +4,10 @@ version = 3 [[package]] name = "acir" -version = "0.37.0" +version = "0.39.0" dependencies = [ "acir_field", - "base64", + "base64 0.21.2", "bincode", "brillig", "flate2", @@ -23,12 +23,12 @@ dependencies = [ [[package]] name = "acir_field" -version = "0.37.0" +version = "0.39.0" dependencies = [ "ark-bls12-381", "ark-bn254", "ark-ff", - "cfg-if", + "cfg-if 1.0.0", "hex", "num-bigint", "num-traits", @@ -37,28 +37,29 @@ dependencies = [ [[package]] name = "acvm" -version = "0.37.0" +version = "0.39.0" dependencies = [ "acir", "acvm_blackbox_solver", - "acvm_stdlib", "brillig_vm", "indexmap 1.9.3", "num-bigint", - "num-traits", "paste", "proptest", - "rand", + "rand 0.8.5", "thiserror", + "tracing", ] [[package]] name = "acvm_blackbox_solver" -version = "0.37.0" +version = "0.39.0" dependencies = [ "acir", "blake2", + "blake3", "k256", + "keccak", "p256", "sha2", "sha3", @@ -67,30 +68,23 @@ dependencies = [ [[package]] name = "acvm_js" -version = "0.37.0" +version = "0.39.0" dependencies = [ "acvm", - "barretenberg_blackbox_solver", + "bn254_blackbox_solver", "build-data", - "cfg-if", + "cfg-if 1.0.0", "console_error_panic_hook", "const-str", "gloo-utils", "js-sys", - "log", "pkg-config", "serde", + "tracing-subscriber", + "tracing-web", "wasm-bindgen", "wasm-bindgen-futures", "wasm-bindgen-test", - "wasm-logger", -] - -[[package]] -name = "acvm_stdlib" -version = "0.37.0" -dependencies = [ - "acir", ] [[package]] @@ -114,21 +108,22 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom", + "getrandom 0.2.10", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ - "cfg-if", - "getrandom", + "cfg-if 1.0.0", + "getrandom 0.2.10", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -217,7 +212,7 @@ checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" [[package]] name = "arena" -version = "0.20.0" +version = "0.23.0" dependencies = [ "generational-arena", ] @@ -348,9 +343,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand", + "rand 0.8.5", ] +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + [[package]] name = "arrayvec" version = "0.7.4" @@ -393,7 +394,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "138985dd8aefbefeaa66b01b7f5b2b6b4c333fcef1cc5f32c63a2aabe37d6de3" dependencies = [ - "futures", + "futures 0.3.28", "lsp-types 0.94.1", "pin-project-lite", "rustix", @@ -407,17 +408,6 @@ dependencies = [ "waitpid-any", ] -[[package]] -name = "atty" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] - [[package]] name = "autocfg" version = "1.1.0" @@ -426,7 +416,7 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "aztec_macros" -version = "0.20.0" +version = "0.23.0" dependencies = [ "iter-extended", "noirc_frontend", @@ -442,7 +432,6 @@ dependencies = [ "const_format", "dirs", "flate2", - "log", "reqwest", "serde", "serde_json", @@ -450,6 +439,7 @@ dependencies = [ "tempfile", 
"test-binary", "thiserror", + "tracing", ] [[package]] @@ -460,41 +450,25 @@ checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12" dependencies = [ "addr2line", "cc", - "cfg-if", + "cfg-if 1.0.0", "libc", "miniz_oxide", "object", "rustc-demangle", ] -[[package]] -name = "barretenberg_blackbox_solver" -version = "0.37.0" -dependencies = [ - "acir", - "acvm_blackbox_solver", - "ark-ec", - "ark-ff", - "flate2", - "getrandom", - "grumpkin", - "js-sys", - "num-bigint", - "pkg-config", - "reqwest", - "rust-embed", - "tar", - "thiserror", - "wasm-bindgen-futures", - "wasmer", -] - [[package]] name = "base16ct" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "349a06037c7bf932dd7e7d1f653678b2038b9ad46a74102f1fc7bd7872678cce" +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.2" @@ -582,6 +556,19 @@ dependencies = [ "digest", ] +[[package]] +name = "blake3" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if 1.0.0", + "constant_time_eq", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -591,9 +578,31 @@ dependencies = [ "generic-array", ] +[[package]] +name = "bn254_blackbox_solver" +version = "0.39.0" +dependencies = [ + "acir", + "acvm_blackbox_solver", + "ark-ec", + "ark-ff", + "flate2", + "getrandom 0.2.10", + "grumpkin", + "js-sys", + "num-bigint", + "pkg-config", + "reqwest", + "rust-embed", + "tar", + "thiserror", + "wasm-bindgen-futures", + "wasmer", +] + [[package]] name = "brillig" -version = "0.37.0" +version = "0.39.0" dependencies = [ "acir_field", "serde", @@ -601,7 +610,7 @@ dependencies = [ [[package]] name = 
"brillig_vm" -version = "0.37.0" +version = "0.39.0" dependencies = [ "acir", "acvm_blackbox_solver", @@ -744,6 +753,12 @@ version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" +[[package]] +name = "cfg-if" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" + [[package]] name = "cfg-if" version = "1.0.0" @@ -831,7 +846,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -869,7 +884,7 @@ checksum = "fc4159b76af02757139baf42c0c971c6dc155330999fbfd8eddb29b97fb2db68" dependencies = [ "codespan-reporting", "lsp-types 0.88.0", - "url", + "url 2.4.0", ] [[package]] @@ -916,6 +931,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +[[package]] +name = "comma" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" + [[package]] name = "console" version = "0.15.7" @@ -934,7 +955,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "wasm-bindgen", ] @@ -970,6 +991,18 @@ dependencies = [ "unicode-xid", ] +[[package]] +name = "constant_time_eq" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" + +[[package]] +name = "convert_case" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" + [[package]] name = 
"core-foundation-sys" version = "0.8.4" @@ -983,7 +1016,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "80128832c58ea9cbd041d2a759ec449224487b2c1e400453d99d244eead87a8e" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "libc", "scopeguard", "windows-sys 0.33.0", @@ -995,7 +1028,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ee34052ee3d93d6d8f3e6f81d85c47921f6653a19a7b70e939e3e602d893a674" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1096,7 +1129,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1137,11 +1170,11 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "14c3242926edf34aec4ac3a77108ad4854bffaa2e4ddc1824124ce59231302d5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] @@ -1151,7 +1184,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-epoch", "crossbeam-utils", ] @@ -1163,7 +1196,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" dependencies = [ "autocfg", - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", "memoffset 0.9.0", "scopeguard", @@ -1175,17 +1208,17 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "crossbeam-utils", ] 
[[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" +checksum = "c06d96137f14f244c37f989d9fff8f95e6c18b918e71f36638f8c49112e4c78f" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1195,7 +1228,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef2b4b23cddf68b89b8f8069890e8c270d54e2d5fe1b143820234805e4cb17ef" dependencies = [ "generic-array", - "rand_core", + "rand_core 0.6.4", "subtle", "zeroize", ] @@ -1263,7 +1296,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1274,7 +1307,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1283,11 +1316,11 @@ version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core", + "parking_lot_core 0.9.8", ] [[package]] @@ -1329,6 +1362,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive_more" +version = "0.99.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +dependencies = [ + "convert_case", + "proc-macro2", + "quote", + "rustc_version", + "syn 1.0.109", +] + [[package]] name = "difflib" version = "0.4.0" @@ -1361,7 +1407,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "dirs-sys-next", ] @@ -1440,7 +1486,7 @@ dependencies = [ "generic-array", "group", "pkcs8", 
- "rand_core", + "rand_core 0.6.4", "sec1", "subtle", "zeroize", @@ -1464,7 +1510,7 @@ version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1511,20 +1557,7 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.26", -] - -[[package]] -name = "env_logger" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12e6657c4c97ebab115a42dcee77225f7f482cdd841cf7088c657a42e9e00e7" -dependencies = [ - "atty", - "humantime", - "log", - "regex", - "termcolor", + "syn 2.0.32", ] [[package]] @@ -1581,7 +1614,7 @@ version = "3.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef033ed5e9bad94e55838ca0ca906db0e043f517adda0c8b79c7a8c66c93c1b5" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "rustix", "windows-sys 0.48.0", ] @@ -1592,7 +1625,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d013fc25338cc558c5c2cfbad646908fb23591e2404481826742b651c9af7160" dependencies = [ - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1611,7 +1644,7 @@ version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "windows-sys 0.48.0", @@ -1631,9 +1664,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.26" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b9429470923de8e8cbd4d2dc513535400b4b3fef0319fb5c4e1f520a7bef743" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1650,7 +1683,7 @@ dependencies = [ [[package]] name = "fm" -version = "0.20.0" +version = "0.23.0" 
dependencies = [ "codespan-reporting", "iter-extended", @@ -1670,7 +1703,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "percent-encoding", + "percent-encoding 2.3.0", ] [[package]] @@ -1679,6 +1712,12 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" +[[package]] +name = "futures" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" + [[package]] name = "futures" version = "0.3.28" @@ -1687,6 +1726,7 @@ checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", + "futures-executor", "futures-io", "futures-sink", "futures-task", @@ -1709,6 +1749,18 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", + "num_cpus", +] + [[package]] name = "futures-io" version = "0.3.28" @@ -1723,7 +1775,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -1744,6 +1796,7 @@ version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" dependencies = [ + "futures 0.1.31", "futures-channel", "futures-core", "futures-io", @@ -1771,7 +1824,7 @@ version = "0.2.9" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877e94aff08e743b651baaea359664321055749b398adff8740a7399af7796e7" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", ] [[package]] @@ -1784,16 +1837,27 @@ dependencies = [ "version_check", ] +[[package]] +name = "getrandom" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" +dependencies = [ + "cfg-if 1.0.0", + "libc", + "wasi 0.9.0+wasi-snapshot-preview1", +] + [[package]] name = "getrandom" version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "libc", - "wasi", + "wasi 0.11.0+wasi-snapshot-preview1", "wasm-bindgen", ] @@ -1864,7 +1928,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dfbfb3a6cfbd390d5c9564ab283a0349b9b9fcd46a706c1eb10e0db70bfbac7" dependencies = [ "ff", - "rand_core", + "rand_core 0.6.4", "subtle", ] @@ -1881,9 +1945,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.20" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97ec8491ebaf99c8eaa73058b045fe58073cd6be7f596ac993ced0b0a0c01049" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ "bytes", "fnv", @@ -1891,10 +1955,10 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.0.0", "slab", "tokio", - "tokio-util", + "tokio-util 0.7.8", "tracing", ] @@ -1928,7 +1992,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", ] [[package]] @@ -1952,15 +2016,6 @@ version = "0.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.1.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - [[package]] name = "hermit-abi" version = "0.3.2" @@ -2016,12 +2071,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" -[[package]] -name = "humantime" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" - [[package]] name = "hyper" version = "0.14.27" @@ -2039,7 +2088,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2095,6 +2144,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" +[[package]] +name = "idna" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f09e0f0b1fb55fdee1f17470ad800da77af5186a1a76c026b679358b7e844e" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "idna" version = "0.4.0" @@ -2129,7 +2189,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0acd33ff0285af998aaf9b57342af478078f53492322fafc47450e09397e0e9" dependencies = [ "bitmaps", - "rand_core", + "rand_core 0.6.4", "rand_xoshiro", "serde", "sized-chunks", @@ -2195,7 +2255,7 @@ version = "0.11.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2fb7c1b80a1dfa604bb4a649a5c5aeef3d913f7c520cb42b40e534e8a61bcdfc" dependencies = [ - "ahash 0.8.3", + "ahash 
0.8.6", "indexmap 1.9.3", "is-terminal", "itoa", @@ -2207,6 +2267,15 @@ dependencies = [ "str_stack", ] +[[package]] +name = "instant" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +dependencies = [ + "cfg-if 1.0.0", +] + [[package]] name = "ipnet" version = "2.8.0" @@ -2219,14 +2288,14 @@ version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "rustix", "windows-sys 0.48.0", ] [[package]] name = "iter-extended" -version = "0.20.0" +version = "0.23.0" [[package]] name = "itertools" @@ -2252,13 +2321,127 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonrpc" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "34efde8d2422fb79ed56db1d3aea8fa5b583351d15a26770cdee2f88813dd702" +dependencies = [ + "base64 0.13.1", + "minreq", + "serde", + "serde_json", +] + +[[package]] +name = "jsonrpc-client-transports" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2b99d4207e2a04fb4581746903c2bb7eb376f88de9c699d0f3e10feeac0cd3a" +dependencies = [ + "derive_more", + "futures 0.3.28", + "jsonrpc-core", + "jsonrpc-pubsub", + "log", + "serde", + "serde_json", + "url 1.7.2", +] + +[[package]] +name = "jsonrpc-core" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14f7f76aef2d054868398427f6c54943cf3d1caa9a7ec7d0c38d69df97a965eb" +dependencies = [ + "futures 0.3.28", + "futures-executor", + "futures-util", + "log", + "serde", + "serde_derive", + "serde_json", +] + +[[package]] +name = "jsonrpc-core-client" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b51da17abecbdab3e3d4f26b01c5ec075e88d3abe3ab3b05dc9aa69392764ec0" +dependencies = [ + "futures 0.3.28", + "jsonrpc-client-transports", +] + +[[package]] +name = "jsonrpc-derive" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b939a78fa820cdfcb7ee7484466746a7377760970f6f9c6fe19f9edcc8a38d2" +dependencies = [ + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "jsonrpc-http-server" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1dea6e07251d9ce6a552abfb5d7ad6bc290a4596c8dcc3d795fae2bbdc1f3ff" +dependencies = [ + "futures 0.3.28", + "hyper", + "jsonrpc-core", + "jsonrpc-server-utils", + "log", + "net2", + "parking_lot 0.11.2", + "unicase", +] + +[[package]] +name = "jsonrpc-pubsub" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240f87695e6c6f62fb37f05c02c04953cf68d6408b8c1c89de85c7a0125b1011" +dependencies = [ + "futures 0.3.28", + "jsonrpc-core", + "lazy_static", + "log", + "parking_lot 0.11.2", + "rand 0.7.3", + "serde", +] + +[[package]] +name = "jsonrpc-server-utils" +version = "18.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa4fdea130485b572c39a460d50888beb00afb3e35de23ccd7fad8ff19f0e0d4" +dependencies = [ + "bytes", + "futures 0.3.28", + "globset", + "jsonrpc-core", + "lazy_static", + "log", + "tokio", + "tokio-stream", + "tokio-util 0.6.10", + "unicase", +] + [[package]] name = "k256" version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72c1e0b51e7ec0a97369623508396067a486bd0cbed95a2659a4b863d28cfc8b" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "ecdsa", "elliptic-curve", "sha2", @@ -2287,9 +2470,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.147" +version = "0.2.151" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libm" @@ -2338,7 +2521,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2351,7 +2534,7 @@ dependencies = [ "serde", "serde_json", "serde_repr", - "url", + "url 2.4.0", ] [[package]] @@ -2363,6 +2546,21 @@ dependencies = [ "libc", ] +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + [[package]] name = "memchr" version = "2.5.0" @@ -2396,15 +2594,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61c719bcfbcf5d62b3a09efa6088de8c54bc0bfcd3ea7ae39fcc186108b8de1" -dependencies = [ - "autocfg", -] - [[package]] name = "memoffset" version = "0.9.0" @@ -2429,14 +2618,25 @@ dependencies = [ "adler", ] +[[package]] +name = "minreq" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3371dfc7b772c540da1380123674a8e20583aca99907087d990ca58cf44203" +dependencies = [ + "log", + "serde", + "serde_json", +] + [[package]] name = "mio" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", - "wasi", + "wasi 
0.11.0+wasi-snapshot-preview1", "windows-sys 0.48.0", ] @@ -2448,12 +2648,17 @@ checksum = "7843ec2de400bcbc6a6328c958dc38e5359da6e93e72e37bc5246bf1ae776389" [[package]] name = "nargo" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "codespan-reporting", "fm", "iter-extended", + "jsonrpc", + "jsonrpc-core", + "jsonrpc-core-client", + "jsonrpc-derive", + "jsonrpc-http-server", "noirc_abi", "noirc_driver", "noirc_errors", @@ -2463,21 +2668,22 @@ dependencies = [ "rayon", "rustc_version", "serde", + "serial_test", "tempfile", "thiserror", + "tracing", ] [[package]] name = "nargo_cli" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "assert_cmd", "assert_fs", "async-lsp", "backend-interface", - "barretenberg_blackbox_solver", - "bb_abstraction_leaks", + "bn254_blackbox_solver", "build-data", "clap", "color-eyre", @@ -2485,7 +2691,6 @@ dependencies = [ "criterion", "dap", "dirs", - "env_logger", "fm", "hex", "iai", @@ -2513,29 +2718,28 @@ dependencies = [ "test-binary", "thiserror", "tokio", - "tokio-util", - "toml", + "tokio-util 0.7.8", + "toml 0.7.6", "tower", - "tracing", "tracing-appender", "tracing-subscriber", ] [[package]] name = "nargo_fmt" -version = "0.20.0" +version = "0.23.0" dependencies = [ "bytecount", "noirc_frontend", "serde", "similar-asserts", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] name = "nargo_toml" -version = "0.20.0" +version = "0.23.0" dependencies = [ "dirs", "fm", @@ -2544,8 +2748,19 @@ dependencies = [ "semver", "serde", "thiserror", - "toml", - "url", + "toml 0.7.6", + "url 2.4.0", +] + +[[package]] +name = "net2" +version = "0.2.39" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b13b648036a2339d06de780866fbdfda0dde886de7b3af2ddeba8b14f4ee34ac" +dependencies = [ + "cfg-if 0.1.10", + "libc", + "winapi", ] [[package]] @@ -2565,28 +2780,43 @@ checksum = "8f3790c00a0150112de0f4cd161e3d7fc4b2d8a5542ffc35f099a2562aecb35c" dependencies = [ "bitflags 1.3.2", "cc", - 
"cfg-if", + "cfg-if 1.0.0", "libc", "memoffset 0.6.5", ] [[package]] name = "nix" -version = "0.26.2" +version = "0.25.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f346ff70e7dbfd675fe90590b92d59ef2de15a8779ae305ebcbfd3f0caf59be4" +dependencies = [ + "autocfg", + "bitflags 1.3.2", + "cfg-if 1.0.0", + "libc", + "memoffset 0.6.5", + "pin-utils", +] + +[[package]] +name = "nix" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", "libc", - "static_assertions", ] [[package]] name = "noir_debugger" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", + "assert_cmd", + "build-data", "codespan-reporting", "dap", "easy-repl", @@ -2597,18 +2827,22 @@ dependencies = [ "noirc_frontend", "noirc_printable_type", "owo-colors", + "rexpect", "serde_json", + "tempfile", + "test-binary", "thiserror", ] [[package]] name = "noir_lsp" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "async-lsp", "codespan-lsp", "fm", + "fxhash", "lsp-types 0.94.1", "nargo", "nargo_fmt", @@ -2616,9 +2850,11 @@ dependencies = [ "noirc_driver", "noirc_errors", "noirc_frontend", + "rayon", "serde", "serde_json", "serde_with", + "thiserror", "tokio", "tower", "wasm-bindgen", @@ -2626,28 +2862,30 @@ dependencies = [ [[package]] name = "noir_wasm" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "build-data", "console_error_panic_hook", "fm", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "js-sys", - "log", "nargo", "noirc_driver", "noirc_errors", + "noirc_evaluator", "noirc_frontend", + "rust-embed", "serde", + "tracing-subscriber", + "tracing-web", "wasm-bindgen", - "wasm-logger", ] [[package]] name = "noirc_abi" -version = "0.20.0" +version = "0.23.0" dependencies = [ 
"acvm", "iter-extended", @@ -2659,17 +2897,17 @@ dependencies = [ "strum", "strum_macros", "thiserror", - "toml", + "toml 0.7.6", ] [[package]] name = "noirc_abi_wasm" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "build-data", "console_error_panic_hook", - "getrandom", + "getrandom 0.2.10", "gloo-utils", "iter-extended", "js-sys", @@ -2681,7 +2919,7 @@ dependencies = [ [[package]] name = "noirc_driver" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "aztec_macros", @@ -2696,25 +2934,30 @@ dependencies = [ "noirc_frontend", "rust-embed", "serde", + "tracing", ] [[package]] name = "noirc_errors" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", + "base64 0.21.2", "chumsky", "codespan", "codespan-reporting", + "flate2", "fm", "noirc_printable_type", "serde", + "serde_json", "serde_with", + "tracing", ] [[package]] name = "noirc_evaluator" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "fxhash", @@ -2725,11 +2968,12 @@ dependencies = [ "num-bigint", "serde", "thiserror", + "tracing", ] [[package]] name = "noirc_frontend" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "arena", @@ -2746,15 +2990,18 @@ dependencies = [ "smol_str", "strum", "strum_macros", + "tempfile", "thiserror", + "tracing", ] [[package]] name = "noirc_printable_type" -version = "0.20.0" +version = "0.23.0" dependencies = [ "acvm", "iter-extended", + "jsonrpc", "regex", "serde", "serde_json", @@ -2824,7 +3071,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.2", + "hermit-abi", "libc", ] @@ -2872,6 +3119,17 @@ dependencies = [ "sha2", ] +[[package]] +name = "parking_lot" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99" +dependencies = [ + "instant", + 
"lock_api", + "parking_lot_core 0.8.6", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -2879,7 +3137,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core", + "parking_lot_core 0.9.8", +] + +[[package]] +name = "parking_lot_core" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" +dependencies = [ + "cfg-if 1.0.0", + "instant", + "libc", + "redox_syscall 0.2.16", + "smallvec", + "winapi", ] [[package]] @@ -2888,7 +3160,7 @@ version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "libc", "redox_syscall 0.3.5", "smallvec", @@ -2901,6 +3173,12 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "percent-encoding" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831" + [[package]] name = "percent-encoding" version = "2.3.0" @@ -2925,7 +3203,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d5285893bb5eb82e6aaf5d59ee909a06a16737a8970984dd7746ba9283498d6" dependencies = [ "phf_shared", - "rand", + "rand 0.8.5", ] [[package]] @@ -2953,9 +3231,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.10" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c40d25201921e5ff0c862a505c6557ea88568a4e3ace775ab55e93f2f4f9d57" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = 
"pin-utils" @@ -3014,15 +3292,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978385d59daf9269189d052ca8a84c1acfd0715c0599a5d5188d4acc078ca46a" dependencies = [ "backtrace", - "cfg-if", + "cfg-if 1.0.0", "criterion", "findshlibs", "inferno", "libc", "log", - "nix 0.26.2", + "nix 0.26.4", "once_cell", - "parking_lot", + "parking_lot 0.12.1", "smallvec", "symbolic-demangle", "tempfile", @@ -3091,6 +3369,15 @@ dependencies = [ "unicode-width", ] +[[package]] +name = "proc-macro-crate" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +dependencies = [ + "toml 0.5.11", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3141,10 +3428,10 @@ dependencies = [ "bitflags 2.3.3", "lazy_static", "num-traits", - "rand", - "rand_chacha", + "rand 0.8.5", + "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax", + "regex-syntax 0.7.4", "rusty-fork", "tempfile", "unarray", @@ -3210,6 +3497,19 @@ dependencies = [ "nibble_vec", ] +[[package]] +name = "rand" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" +dependencies = [ + "getrandom 0.1.16", + "libc", + "rand_chacha 0.2.2", + "rand_core 0.5.1", + "rand_hc", +] + [[package]] name = "rand" version = "0.8.5" @@ -3217,8 +3517,18 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha", - "rand_core", + "rand_chacha 0.3.1", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" +dependencies = [ + "ppv-lite86", + "rand_core 0.5.1", ] [[package]] @@ -3228,7 +3538,16 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core", + "rand_core 0.6.4", +] + +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" +dependencies = [ + "getrandom 0.1.16", ] [[package]] @@ -3237,7 +3556,16 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom", + "getrandom 0.2.10", +] + +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" +dependencies = [ + "rand_core 0.5.1", ] [[package]] @@ -3246,7 +3574,7 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3255,7 +3583,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f97cdb2a36ed4183de61b2f824cc45c9f1037f28afe0a322e9fff4c108b5aaa" dependencies = [ - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -3302,7 +3630,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom", + "getrandom 0.2.10", "redox_syscall 0.2.16", "thiserror", ] @@ -3328,7 +3656,7 @@ dependencies = [ "aho-corasick", "memchr", "regex-automata 0.3.3", - "regex-syntax", + "regex-syntax 0.7.4", ] [[package]] @@ -3336,6 +3664,9 @@ name = "regex-automata" version = "0.1.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] [[package]] name = "regex-automata" @@ -3345,9 +3676,15 @@ checksum = "39354c10dd07468c2e73926b23bb9c2caca74c5501e38a35da70406f1d923310" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.7.4", ] +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + [[package]] name = "regex-syntax" version = "0.7.4" @@ -3381,7 +3718,7 @@ version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64", + "base64 0.21.2", "bytes", "encoding_rs", "futures-core", @@ -3396,7 +3733,7 @@ dependencies = [ "log", "mime", "once_cell", - "percent-encoding", + "percent-encoding 2.3.0", "pin-project-lite", "rustls", "rustls-pemfile", @@ -3406,7 +3743,7 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "url", + "url 2.4.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -3414,6 +3751,19 @@ dependencies = [ "winreg", ] +[[package]] +name = "rexpect" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01ff60778f96fb5a48adbe421d21bf6578ed58c0872d712e7e08593c195adff8" +dependencies = [ + "comma", + "nix 0.25.1", + "regex", + "tempfile", + "thiserror", +] + [[package]] name = "rfc6979" version = "0.3.1" @@ -3499,7 +3849,7 @@ dependencies = [ "quote", "rust-embed-utils", "shellexpand", - "syn 2.0.26", + "syn 2.0.32", "walkdir", ] @@ -3566,7 +3916,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64", + "base64 0.21.2", ] 
[[package]] @@ -3604,7 +3954,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db7826789c0e25614b03e5a54a0717a86f9ff6e6e5247f92b369472869320039" dependencies = [ "bitflags 1.3.2", - "cfg-if", + "cfg-if 1.0.0", "clipboard-win", "dirs-next", "fd-lock", @@ -3809,7 +4159,7 @@ checksum = "741e124f5485c7e60c03b043f79f320bff3527f4bbf12cf3831750dc46a0ec2c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -3831,7 +4181,7 @@ checksum = "1d89a8107374290037607734c0b73a85db7ed80cae314b3c5791f192a496e731" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -3861,7 +4211,7 @@ version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1402f54f9a3b9e2efe71c1cea24e648acce55887983553eeb858cf3115acfd49" dependencies = [ - "base64", + "base64 0.21.2", "chrono", "hex", "indexmap 1.9.3", @@ -3881,7 +4231,32 @@ dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", +] + +[[package]] +name = "serial_test" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e56dd856803e253c8f298af3f4d7eb0ae5e23a737252cd90bb4f3b435033b2d" +dependencies = [ + "dashmap", + "futures 0.3.28", + "lazy_static", + "log", + "parking_lot 0.12.1", + "serial_test_derive", +] + +[[package]] +name = "serial_test_derive" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.32", ] [[package]] @@ -3890,7 +4265,7 @@ version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "cpufeatures", "digest", ] @@ -3946,7 +4321,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum 
= "74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" dependencies = [ "digest", - "rand_core", + "rand_core 0.6.4", ] [[package]] @@ -4046,6 +4421,16 @@ dependencies = [ "winapi", ] +[[package]] +name = "socket2" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +dependencies = [ + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "spin" version = "0.5.2" @@ -4068,12 +4453,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "str-buf" version = "1.0.6" @@ -4153,9 +4532,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.26" +version = "2.0.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "239814284fd6f1a4ffe4ca893952cdd93c224b6a1571c9a9eadd670295c0c9e2" dependencies = [ "proc-macro2", "quote", @@ -4191,7 +4570,7 @@ version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "fastrand", "redox_syscall 0.3.5", "rustix", @@ -4226,9 +4605,9 @@ checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" [[package]] name = "test-binary" -version = "3.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb28771e7854f02e5705f2a1b09451d932a273f5a4ec1c9fa4c65882b8b7b6ca" +checksum = "6c7cb854285c40b61c0fade358bf63a2bb1226688a1ea11432ea65349209e6e3" dependencies = [ "camino", 
"cargo_metadata", @@ -4275,7 +4654,7 @@ checksum = "463fe12d7993d3b327787537ce8dd4dfa058de32fc2b195ef3cde03dc4771e8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4284,7 +4663,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "once_cell", ] @@ -4343,31 +4722,30 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.29.1" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "532826ff75199d5833b9d2c5fe410f29235e25704ee5f0ef599fb51c21f4a4da" +checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" dependencies = [ - "autocfg", "backtrace", "bytes", "libc", "mio", "num_cpus", "pin-project-lite", - "socket2", + "socket2 0.5.5", "tokio-macros", "windows-sys 0.48.0", ] [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4380,6 +4758,31 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.6.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36943ee01a6d67977dd3f84a5a1d2efeb4ada3a1ae771cadfaa535d9d9fc6507" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "log", + 
"pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.8" @@ -4395,6 +4798,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "toml" version = "0.7.6" @@ -4484,7 +4896,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] [[package]] @@ -4524,14 +4936,31 @@ version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ + "matchers", "nu-ansi-term", + "once_cell", + "regex", "sharded-slab", "smallvec", "thread_local", + "tracing", "tracing-core", "tracing-log", ] +[[package]] +name = "tracing-web" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e6a141feebd51f8d91ebfd785af50fca223c570b86852166caa3b141defe7c" +dependencies = [ + "js-sys", + "tracing-core", + "tracing-subscriber", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "trie-rs" version = "0.1.1" @@ -4559,6 +4988,15 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" +[[package]] +name = "unicase" +version = "2.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" +dependencies = [ + "version_check", +] + [[package]] name = "unicode-bidi" version = "0.3.13" @@ -4610,6 +5048,17 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "url" +version = 
"1.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a" +dependencies = [ + "idna 0.1.5", + "matches", + "percent-encoding 1.0.1", +] + [[package]] name = "url" version = "2.4.0" @@ -4617,8 +5066,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", - "idna", - "percent-encoding", + "idna 0.4.0", + "percent-encoding 2.3.0", "serde", ] @@ -4684,6 +5133,12 @@ dependencies = [ "try-lock", ] +[[package]] +name = "wasi" +version = "0.9.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -4696,7 +5151,7 @@ version = "0.2.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "serde", "serde_json", "wasm-bindgen-macro", @@ -4713,40 +5168,17 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", "wasm-bindgen-shared", ] -[[package]] -name = "wasm-bindgen-downcast" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dac026d43bcca6e7ce1c0956ba68f59edf6403e8e930a5d891be72c31a44340" -dependencies = [ - "js-sys", - "once_cell", - "wasm-bindgen", - "wasm-bindgen-downcast-macros", -] - -[[package]] -name = "wasm-bindgen-downcast-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5020cfa87c7cecefef118055d44e3c1fc122c7ec25701d528ee458a0b45f38f" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "wasm-bindgen-futures" version = "0.4.36" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "js-sys", "wasm-bindgen", "web-sys", @@ -4770,7 +5202,7 @@ checksum = "e128beba882dd1eb6200e1dc92ae6c5dbaa4311aa7bb211ca035779e5efc39f8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4814,25 +5246,14 @@ dependencies = [ "leb128", ] -[[package]] -name = "wasm-logger" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074649a66bb306c8f2068c9016395fa65d8e08d2affcbf95acf3c24c3ab19718" -dependencies = [ - "log", - "wasm-bindgen", - "web-sys", -] - [[package]] name = "wasmer" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50cb1ae2956aac1fbbcf334c543c1143cdf7d5b0a5fb6c3d23a17bf37dd1f47b" +checksum = "ce45cc009177ca345a6d041f9062305ad467d15e7d41494f5b81ab46d62d7a58" dependencies = [ "bytes", - "cfg-if", + "cfg-if 1.0.0", "derivative", "indexmap 1.9.3", "js-sys", @@ -4844,7 +5265,6 @@ dependencies = [ "target-lexicon", "thiserror", "wasm-bindgen", - "wasm-bindgen-downcast", "wasmer-compiler", "wasmer-compiler-cranelift", "wasmer-derive", @@ -4858,13 +5278,13 @@ dependencies = [ [[package]] name = "wasmer-compiler" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12fd9aeef339095798d1e04957d5657d97490b1112f145cbf08b98f6393b4a0a" +checksum = "e044f6140c844602b920deb4526aea3cc9c0d7cf23f00730bb9b2034669f522a" dependencies = [ "backtrace", "bytes", - "cfg-if", + "cfg-if 1.0.0", "enum-iterator", "enumset", "lazy_static", @@ -4885,9 +5305,9 @@ dependencies = [ [[package]] name = "wasmer-compiler-cranelift" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"344f5f1186c122756232fe7f156cc8d2e7bf333d5a658e81e25efa3415c26d07" +checksum = "32ce02358eb44a149d791c1d6648fb7f8b2f99cd55e3c4eef0474653ec8cc889" dependencies = [ "cranelift-codegen", "cranelift-entity", @@ -4904,9 +5324,9 @@ dependencies = [ [[package]] name = "wasmer-derive" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ac8c1f2dc0ed3c7412a5546e468365184a461f8ce7dfe2a707b621724339f91" +checksum = "c782d80401edb08e1eba206733f7859db6c997fc5a7f5fb44edc3ecd801468f6" dependencies = [ "proc-macro-error", "proc-macro2", @@ -4916,9 +5336,9 @@ dependencies = [ [[package]] name = "wasmer-types" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a57ecbf218c0a9348d4dfbdac0f9d42d9201ae276dffb13e61ea4ff939ecce7" +checksum = "fd09e80d4d74bb9fd0ce6c3c106b1ceba1a050f9948db9d9b78ae53c172d6157" dependencies = [ "bytecheck", "enum-iterator", @@ -4932,13 +5352,13 @@ dependencies = [ [[package]] name = "wasmer-vm" -version = "4.2.3" +version = "4.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60c3513477bc0097250f6e34a640e2a903bb0ee57e6bb518c427f72c06ac7728" +checksum = "bdcd8a4fd36414a7b6a003dbfbd32393bce3e155d715dd877c05c1b7a41d224d" dependencies = [ "backtrace", "cc", - "cfg-if", + "cfg-if 1.0.0", "corosensei", "crossbeam-queue", "dashmap", @@ -4949,7 +5369,7 @@ dependencies = [ "lazy_static", "libc", "mach", - "memoffset 0.8.0", + "memoffset 0.9.0", "more-asserts", "region", "scopeguard", @@ -4971,7 +5391,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2ea896273ea99b15132414be1da01ab0d8836415083298ecaffbe308eaac87a" dependencies = [ "indexmap 1.9.3", - "url", + "url 2.4.0", ] [[package]] @@ -5241,7 +5661,7 @@ version = "0.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" 
dependencies = [ - "cfg-if", + "cfg-if 1.0.0", "windows-sys 0.48.0", ] @@ -5263,6 +5683,26 @@ dependencies = [ "libc", ] +[[package]] +name = "zerocopy" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.32", +] + [[package]] name = "zeroize" version = "1.6.0" @@ -5280,5 +5720,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.26", + "syn 2.0.32", ] diff --git a/Cargo.toml b/Cargo.toml index 3d4b6d33d63..5dfff3dbb5d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,36 +28,34 @@ members = [ "acvm-repo/acir", "acvm-repo/acvm", "acvm-repo/acvm_js", - "acvm-repo/stdlib", "acvm-repo/brillig", "acvm-repo/brillig_vm", "acvm-repo/blackbox_solver", - "acvm-repo/barretenberg_blackbox_solver", + "acvm-repo/bn254_blackbox_solver", ] default-members = ["tooling/nargo_cli"] resolver = "2" [workspace.package] # x-release-please-start-version -version = "0.20.0" +version = "0.23.0" # x-release-please-end authors = ["The Noir Team "] edition = "2021" -rust-version = "1.66" +rust-version = "1.71.1" license = "MIT OR Apache-2.0" repository = "https://github.com/noir-lang/noir/" [workspace.dependencies] # ACVM workspace dependencies -acir_field = { version = "0.37.0", path = "acvm-repo/acir_field", default-features = false } -acir = { version = "0.37.0", path = "acvm-repo/acir", default-features = false } -acvm = { version = "0.37.0", path = "acvm-repo/acvm" } -stdlib = { version = "0.37.0", package = "acvm_stdlib", path = "acvm-repo/stdlib", default-features = false } -brillig = { version = 
"0.37.0", path = "acvm-repo/brillig", default-features = false } -brillig_vm = { version = "0.37.0", path = "acvm-repo/brillig_vm", default-features = false } -acvm_blackbox_solver = { version = "0.37.0", path = "acvm-repo/blackbox_solver", default-features = false } -barretenberg_blackbox_solver = { version = "0.37.0", path = "acvm-repo/barretenberg_blackbox_solver", default-features = false } +acir_field = { version = "0.39.0", path = "acvm-repo/acir_field", default-features = false } +acir = { version = "0.39.0", path = "acvm-repo/acir", default-features = false } +acvm = { version = "0.39.0", path = "acvm-repo/acvm" } +brillig = { version = "0.39.0", path = "acvm-repo/brillig", default-features = false } +brillig_vm = { version = "0.39.0", path = "acvm-repo/brillig_vm", default-features = false } +acvm_blackbox_solver = { version = "0.39.0", path = "acvm-repo/blackbox_solver", default-features = false } +bn254_blackbox_solver = { version = "0.39.0", path = "acvm-repo/bn254_blackbox_solver", default-features = false } # Noir compiler workspace dependencies arena = { path = "compiler/utils/arena" } @@ -122,9 +120,14 @@ const_format = "0.2.30" num-bigint = "0.4" num-traits = "0.2" similar-asserts = "1.5.0" -log = "0.4.17" +tempfile = "3.6.0" +jsonrpc = { version = "0.16.0", features = ["minreq_http"] } +flate2 = "1.0.24" tracing = "0.1.40" +tracing-web = "0.1.3" +tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } +rust-embed = "6.6.0" [profile.dev] # This is required to be able to run `cargo test` in acvm_js due to the `locals exceeds maximum` error. diff --git a/Dockerfile b/Dockerfile index ac818cb8bd2..000292e0a47 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,15 +1,12 @@ -FROM rust:alpine3.17 -RUN apk update \ - && apk upgrade \ - && apk add --no-cache \ - build-base \ - bash +FROM rust:bookworm WORKDIR /usr/src/noir COPY . . RUN ./scripts/bootstrap_native.sh -# When running the container, mount the current working directory to /project. 
-FROM alpine:3.17 +# When running the container, mount the users home directory to same location. +FROM ubuntu:lunar +# Install Tini as nargo doesn't handle signals properly. +# Install git as nargo needs it to clone. +RUN apt-get update && apt-get install -y git tini && rm -rf /var/lib/apt/lists/* && apt-get clean COPY --from=0 /usr/src/noir/target/release/nargo /usr/src/noir/target/release/nargo -WORKDIR /project -ENTRYPOINT ["/usr/src/noir/target/release/nargo"] \ No newline at end of file +ENTRYPOINT ["/usr/bin/tini", "--", "/usr/src/noir/target/release/nargo"] \ No newline at end of file diff --git a/Dockerfile.ci b/Dockerfile.ci index 9ca995fd94f..a73ce4ab969 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -1,40 +1,30 @@ -FROM rust:1-slim-bookworm as test-base +FROM rust:1.71.1-slim-bookworm as base RUN apt-get update && apt-get upgrade -y && apt-get install build-essential git -y WORKDIR /usr/src/noir -COPY . . -RUN ./scripts/bootstrap_native.sh -ENV PATH="${PATH}:/usr/src/noir/target/release/" +ENV PATH="${PATH}:/usr/src/noir/target/release" -FROM test-base as test-cargo -RUN apt-get install -y curl libc++-dev -RUN ./scripts/test_native.sh +FROM base as base-nargo +COPY . . 
+RUN .github/scripts/nargo-build.sh -FROM test-base as test-js -RUN apt-get install pkg-config libssl-dev -y -RUN ./scripts/install_wasm-bindgen.sh +FROM base as base-js RUN apt-get install -y ca-certificates curl gnupg RUN mkdir -p /etc/apt/keyrings RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg RUN echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x nodistro main" | tee /etc/apt/sources.list.d/nodesource.list RUN apt-get update && apt-get install nodejs -y RUN corepack enable -RUN yarn --immutable RUN apt-get install -y jq -RUN yarn build -RUN yarn workspace @noir-lang/acvm_js test -RUN npx playwright install && npx playwright install-deps -RUN yarn workspace @noir-lang/acvm_js test:browser -RUN yarn workspace @noir-lang/noirc_abi test -RUN yarn workspace @noir-lang/noirc_abi test:browser -RUN yarn workspace @noir-lang/backend_barretenberg test -RUN ./scripts/nargo_compile_noir_js_assert_lt.sh -RUN rm -rf /usr/src/noir/tooling/noir_js/test/noir_compiled_examples/assert_lt/target/debug_assert_lt.json -RUN yarn workspace @noir-lang/noir_js test -RUN ./scripts/nargo_compile_wasm_fixtures.sh -RUN yarn workspace @noir-lang/noir_wasm test:node -RUN yarn workspace @noir-lang/noir_wasm test:browser -RUN ./scripts/nargo_compile_noir_codegen_assert_lt.sh -RUN rm -rf /usr/src/noir/tooling/noir_codegen/test/assert_lt/target/debug_assert_lt.json -RUN yarn workspace @noir-lang/noir_codegen test -RUN apt-get install -y libc++-dev -RUN yarn test:integration +COPY yarn.lock package.json .yarnrc.yml ./ +COPY .yarn/ ./.yarn/ +COPY ./acvm-repo/acvm_js/package.json ./acvm-repo/acvm_js/ +COPY ./tooling/noirc_abi_wasm/package.json ./tooling/noirc_abi_wasm/ +COPY ./compiler/wasm/package.json ./compiler/wasm/ +COPY ./tooling/noir_js_types/package.json ./tooling/noir_js_types/ +COPY ./tooling/noir_js_backend_barretenberg/package.json ./tooling/noir_js_backend_barretenberg/ 
+COPY ./tooling/noir_js/package.json ./tooling/noir_js/ +COPY ./tooling/noir_codegen/package.json ./tooling/noir_codegen/ +COPY ./compiler/integration-tests/package.json ./compiler/integration-tests/ +COPY ./docs/package.json ./docs/ +RUN yarn --immutable +COPY . . diff --git a/Dockerfile.packages b/Dockerfile.packages index 17eb0bcd648..f40670c19e4 100644 --- a/Dockerfile.packages +++ b/Dockerfile.packages @@ -2,14 +2,15 @@ FROM rust:alpine3.17 RUN apk update \ && apk upgrade \ && apk add --no-cache \ - build-base \ - pkgconfig \ - openssl-dev \ - npm \ - yarn \ - bash \ - jq \ - git + build-base \ + pkgconfig \ + openssl-dev \ + npm \ + yarn \ + bash \ + jq \ + git \ + curl WORKDIR /usr/src/noir COPY . . @@ -18,4 +19,4 @@ RUN ./scripts/bootstrap_packages.sh FROM scratch COPY --from=0 /usr/src/noir/packages /usr/src/noir/packages # For some unknown reason, on alpine only, we need this to exist. -COPY --from=0 /usr/src/noir/node_modules/@noir-lang /usr/src/noir/node_modules/@noir-lang \ No newline at end of file +COPY --from=0 /usr/src/noir/node_modules/@noir-lang /usr/src/noir/node_modules/@noir-lang diff --git a/README.md b/README.md index 2fc47f16fef..771c3f1c74d 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Noir is a Domain Specific Language for SNARK proving systems. It has been design ## Quick Start -Read the installation section [here](https://noir-lang.org/getting_started/nargo_installation). +Read the installation section [here](https://noir-lang.org/docs/dev/getting_started/installation/). Once you have read through the documentation, you can visit [Awesome Noir](https://github.com/noir-lang/awesome-noir) to run some of the examples that others have created. @@ -58,7 +58,7 @@ This crate's minimum supported rustc version is 1.71.1. ## Working on this project -This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. 
Please follow [our guidelines](https://noir-lang.org/getting_started/nargo_installation/#option-3-compile-from-source) to setup your environment for working on the project. +This project uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. Please follow [our guidelines](https://noir-lang.org/docs/getting_started/installation/other_install_methods#option-3-compile-from-source) to setup your environment for working on the project. ### Building against a different local/remote version of Barretenberg diff --git a/acvm-repo/CHANGELOG.md b/acvm-repo/CHANGELOG.md index fea0029744b..7f68244a7eb 100644 --- a/acvm-repo/CHANGELOG.md +++ b/acvm-repo/CHANGELOG.md @@ -5,6 +5,75 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.39.0](https://github.com/noir-lang/noir/compare/v0.38.0...v0.39.0) (2024-01-22) + + +### ⚠ BREAKING CHANGES + +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) +* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) + +### Features + +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Breaking changes from aztec-packages ([#3955](https://github.com/noir-lang/noir/issues/3955)) ([5be049e](https://github.com/noir-lang/noir/commit/5be049eee6c342649462282ee04f6411e6ea392c)) +* Remove range constraints from witnesses which are constrained to be constants ([#3928](https://github.com/noir-lang/noir/issues/3928)) 
([afe9c7a](https://github.com/noir-lang/noir/commit/afe9c7a38bb9d4245205d3aa46d4ce23d70a5671)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) +* Sync `aztec-packages` ([#4011](https://github.com/noir-lang/noir/issues/4011)) ([fee2452](https://github.com/noir-lang/noir/commit/fee24523c427c27f0bdaf98ea09a852a2da3e94c)) +* Sync commits from `aztec-packages` ([#4068](https://github.com/noir-lang/noir/issues/4068)) ([7a8f3a3](https://github.com/noir-lang/noir/commit/7a8f3a33b57875e681e3d81e667e3570a1cdbdcc)) + + +### Bug Fixes + +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) +* Return error rather instead of panicking on invalid circuit ([#3976](https://github.com/noir-lang/noir/issues/3976)) ([67201bf](https://github.com/noir-lang/noir/commit/67201bfc21a9c8858aa86be9cd47d463fb78d925)) + + +### Miscellaneous Chores + +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579)) +* Rename Arithmetic opcode to AssertZero ([#3840](https://github.com/noir-lang/noir/issues/3840)) ([836f171](https://github.com/noir-lang/noir/commit/836f17145c2901060706294461c2d282dd121b3e)) + +## [0.38.0](https://github.com/noir-lang/noir/compare/v0.37.1...v0.38.0) (2023-12-18) + + +### ⚠ BREAKING CHANGES + +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) 
+ +### Features + +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) + + +### Bug Fixes + +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) + + +### Miscellaneous Chores + +* Remove partial backend feature ([#3805](https://github.com/noir-lang/noir/issues/3805)) ([0383100](https://github.com/noir-lang/noir/commit/0383100853a80a5b28b797cdfeae0d271f1b7805)) +* Remove unused methods on ACIR opcodes ([#3841](https://github.com/noir-lang/noir/issues/3841)) ([9e5d0e8](https://github.com/noir-lang/noir/commit/9e5d0e813d61a0bfb5ee68174ed287c5a20f1579)) + +## [0.37.1](https://github.com/noir-lang/noir/compare/v0.37.0...v0.37.1) (2023-12-15) + + +### Features + +* Aztec-packages ([#3754](https://github.com/noir-lang/noir/issues/3754)) ([c043265](https://github.com/noir-lang/noir/commit/c043265e550b59bd4296504826fe15d3ce3e9ad2)) +* Speed up transformation of debug messages ([#3815](https://github.com/noir-lang/noir/issues/3815)) ([2a8af1e](https://github.com/noir-lang/noir/commit/2a8af1e4141ffff61547ee1c2837a6392bd5db48)) + + +### Bug Fixes + +* Deserialize odd length hex literals ([#3747](https://github.com/noir-lang/noir/issues/3747)) ([4000fb2](https://github.com/noir-lang/noir/commit/4000fb279221eb07187d657bfaa7f1c7b311abf2)) + ## [0.37.0](https://github.com/noir-lang/noir/compare/v0.36.0...v0.37.0) (2023-12-01) @@ -611,7 +680,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Features -* add optimisations to fallback black box functions on booleans 
([#446](https://github.com/noir-lang/acvm/issues/446)) ([2cfb2a8](https://github.com/noir-lang/acvm/commit/2cfb2a8cf911a81eedbd9da13ab2c616abd67f83)) +* add optimizations to fallback black box functions on booleans ([#446](https://github.com/noir-lang/acvm/issues/446)) ([2cfb2a8](https://github.com/noir-lang/acvm/commit/2cfb2a8cf911a81eedbd9da13ab2c616abd67f83)) * **stdlib:** Add fallback implementation of `Keccak256` black box function ([#445](https://github.com/noir-lang/acvm/issues/445)) ([f7ebb03](https://github.com/noir-lang/acvm/commit/f7ebb03653c971f119700ff8126d9eb5ff01be0f)) ## [0.20.0](https://github.com/noir-lang/acvm/compare/root-v0.19.1...root-v0.20.0) (2023-07-20) @@ -681,7 +750,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores -* **acvm:** Remove `CircuitSimplifer` ([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) +* **acvm:** Remove `CircuitSimplifier` ([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) ## [0.17.0](https://github.com/noir-lang/acvm/compare/root-v0.16.0...root-v0.17.0) (2023-07-07) @@ -940,7 +1009,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * require `Backend` to implement `Default` trait ([#223](https://github.com/noir-lang/acvm/issues/223)) * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -963,7 +1032,7 @@ and this project adheres to 
[Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) ([64bb346](https://github.com/noir-lang/acvm/commit/64bb346524428a0ce196826ea1e5ccde08ad6201)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/root-v0.8.1...root-v0.9.0) (2023-04-07) @@ -1077,7 +1146,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 * **acir:** make PublicInputs use a BTreeSet rather than Vec ([#99](https://github.com/noir-lang/acvm/issues/99)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) * **acir:** Add keccak256 Opcode ([#91](https://github.com/noir-lang/acvm/issues/91)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -1101,7 +1170,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimizers and 
transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) ## [0.4.1] - 2023-02-08 diff --git a/acvm-repo/acir/CHANGELOG.md b/acvm-repo/acir/CHANGELOG.md index e31ee66379a..661980a87c9 100644 --- a/acvm-repo/acir/CHANGELOG.md +++ b/acvm-repo/acir/CHANGELOG.md @@ -404,7 +404,7 @@ * replace `MerkleMembership` opcode with `ComputeMerkleRoot` ([#233](https://github.com/noir-lang/acvm/issues/233)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -415,7 +415,7 @@ ### Miscellaneous Chores -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/acir-v0.8.1...acir-v0.9.0) (2023-04-07) @@ -507,7 +507,7 @@ * **acir:** make PublicInputs use a BTreeSet rather than Vec ([#99](https://github.com/noir-lang/acvm/issues/99)) * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) * **acir:** Add keccak256 Opcode ([#91](https://github.com/noir-lang/acvm/issues/91)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -518,4 +518,4 @@ ### 
Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) diff --git a/acvm-repo/acir/Cargo.toml b/acvm-repo/acir/Cargo.toml index 100ab06aff0..49b10c57cc8 100644 --- a/acvm-repo/acir/Cargo.toml +++ b/acvm-repo/acir/Cargo.toml @@ -2,7 +2,7 @@ name = "acir" description = "ACIR is the IR that the VM processes, it is analogous to LLVM IR" # x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -17,7 +17,7 @@ acir_field.workspace = true brillig.workspace = true serde.workspace = true thiserror.workspace = true -flate2 = "1.0.24" +flate2.workspace = true bincode.workspace = true base64.workspace = true diff --git a/acvm-repo/acir/acir_docs.md b/acvm-repo/acir/acir_docs.md index eb532c9ae0c..801aeac1140 100644 --- a/acvm-repo/acir/acir_docs.md +++ b/acvm-repo/acir/acir_docs.md @@ -6,14 +6,14 @@ This document describes the purpose of ACIR, what it is and how ACIR programs ca ## Introduction The purpose of ACIR is to make the link between a generic proving system, such as Aztec's Barretenberg, and a frontend, such as Noir, which describes user-specific computations. -More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. 
Noir is able to generate a proof of execution of a Noir program, using an external proving system. However, proving systems uses specific low-level constrain-based languages. Similarly, frontends have their own internal representation in order to represent user programs. +More precisely, Noir is a programming language for zero-knowledge proofs (ZKP) which allows users to write programs in an intuitive way using a high-level language close to Rust syntax. Noir is able to generate a proof of execution of a Noir program, using an external proving system. However, proving systems use specific low-level constrain-based languages. Similarly, frontends have their own internal representation in order to represent user programs. The goal of ACIR is to provide a generic open-source intermediate representation close to proving system 'languages', but agnostic to a specific proving system, that can be used both by proving system as well as a target for frontends. So, at the end of the day, an ACIR program is just another representation of a program, dedicated to proving systems. ## Abstract Circuit Intermediate Representation ACIR stands for abstract circuit intermediate representation: - **abstract circuit**: circuits are a simple computation model where basic computation units, named gates, are connected with wires. Data flows through the wires while gates compute output wires based on their input. More formally, they are directed acyclic graphs (DAG) where the vertices are the gates and the edges are the wires. Due to the immutability nature of the wires (their value does not change during an execution), they are well suited for describing computations for ZKPs. Furthermore, we do not lose any expressiveness when using a circuit as it is well known that any bounded computation can be translated into an arithmetic circuit (i.e a circuit with only addition and multiplication gates). 
-The term abstract here simply mean that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not exactly use the circuit model, but another model even better suited to ZKPs, the constraint model (see below). +The term abstract here simply means that we do not refer to an actual physical circuit (such as an electronic circuit). Furthermore, we will not exactly use the circuit model, but another model even better suited to ZKPs, the constraint model (see below). - **intermediate representation**: The ACIR representation is intermediate because it lies between a frontend and its proving system. ACIR bytecode makes the link between noir compiler output and the proving system backend input. ## The constraint model @@ -32,18 +32,18 @@ For instance, if input_wire_1 and input_wire_2 values are supplied as 3 and 8, t In summary, the workflow is the following: 1. user program -> (compilation) ACIR, a list of opcodes which constrain (partial) witnesses 2. user inputs + ACIR -> (execution/solving) assign values to all the (partial) witnesses -3. witness assignement + ACIR -> (proving system) proof +3. witness assignment + ACIR -> (proving system) proof Although the ordering of opcode does not matter in theory, since a system of equations is not dependent on its ordering, in practice it matters a lot for the solving (i.e the performance of the execution). ACIR opcodes **must be ordered** so that each opcode can be resolved one after the other. -The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system need the values of all the partial witnesses and all the constraints in order to generate a proof. +The values of the witnesses lie in the scalar field of the proving system. We will refer to it as FieldElement or ACIR field. The proving system needs the values of all the partial witnesses and all the constraints in order to generate a proof. 
*Remark*: The value of a partial witness is unique and fixed throughout a program execution, although in some rare cases, multiple values are possible for a same execution and witness (when there are several valid solutions to the constraints). Having multiple possible values for a witness may indicate that the circuit is not safe. -*Remark*: Why do we use the term partial witnesses? It is because the proving system may create other constraints and witnesses (especially with BlackBoxFuncCall, see below). A proof refers to a full witness assignements and their constraints. ACIR opcodes and their partial witnesses are still an intermediate representation before getting the full list of constraints and witnesses. For the sake of simplicity, we will refer to witness instead of partial witness from now on. +*Remark*: Why do we use the term partial witnesses? It is because the proving system may create other constraints and witnesses (especially with BlackBoxFuncCall, see below). A proof refers to a full witness assignments and their constraints. ACIR opcodes and their partial witnesses are still an intermediate representation before getting the full list of constraints and witnesses. For the sake of simplicity, we will refer to witness instead of partial witness from now on. ## ACIR Reference @@ -51,18 +51,18 @@ We assume here that the proving system is Barretenberg. Some parameters may slig Some opcodes have inputs and outputs, which means that the output is constrained to be the result of the opcode computation from the inputs. The solver expects that all inputs are known when solving such opcodes. -Some opcodes are not constrained, which mean they will not be used by the proving system and are only used by the solver. +Some opcodes are not constrained, which means they will not be used by the proving system and are only used by the solver. Finally, some opcodes will have a predicate, whose value is 0 or 1. 
Its purpose is to nullify the opcode when the value is 0, so that it has no effect. Note that removing the opcode is not a solution because this modifies the circuit (the circuit being mainly the list of the opcodes). -*Remark*: Opcodes operate on witnesses, but we will see that some opcode work on Arithmetic expressions of witnesses. We call an arithmetic expression a linear combination of witnesses and/or products of two witnesses (and also a constant term). A single witness is a (simple) arithmetic expression, and conversly, an arithmetic expression can be turned into a single witness using an arithmetic opcode (see below). So basically, using witnesses or arithmetic expressions is equivalent, but the latter can avoid the creation of witness in some cases. +*Remark*: Opcodes operate on witnesses, but we will see that some opcode work on expressions of witnesses. We call an expression a linear combination of witnesses and/or products of two witnesses (and also a constant term). A single witness is a (simple) expression, and conversely, an expression can be turned into a single witness using an assert-zero opcode (see below). So basically, using witnesses or expressions is equivalent, but the latter can avoid the creation of witness in some cases. -### Arithmetic opcode -An arithmetic opcode adds the constraint that P(w) = 0, where w=(w_1,..w_n) is a tuple of n witnesses, and P is a multi-variate polynomial of total degree at most 2. +### AssertZero opcode +An AssertZero opcode adds the constraint that P(w) = 0, where w=(w_1,..w_n) is a tuple of n witnesses, and P is a multi-variate polynomial of total degree at most 2. The coefficients ${q_M}_{i,j}, q_i,q_c$ of the polynomial are known values which define the opcode. 
-A general expression of arithmetic opcode is the following: $\sum_{i,j} {q_M}_{i,j}w_iw_j + \sum_i q_iw_i +q_c = 0$ +A general expression of assert-zero opcode is the following: $\sum_{i,j} {q_M}_{i,j}w_iw_j + \sum_i q_iw_i +q_c = 0$ -An arithmetic opcode can be used to: +An assert-zero opcode can be used to: - **express a constraint** on witnesses; for instance to express that a witness $w$ is a boolean, you can add the opcode: $w*w-w=0$ - or, to **compute the value** of an arithmetic operation of some inputs. For instance, to multiply two witnesses $x$ and $y$, you would use the opcode $z-x*y=0$, which would constraint $z$ to be $x*y$. @@ -70,8 +70,8 @@ An arithmetic opcode can be used to: The solver expects that at most one witness is not known when executing the opcode. ### BlackBoxFuncCall opcode -These opcodes represent a specific computation. Even if any computation can be done using only arithmetic opcodes, it is not always efficient. Some proving systems, and in particular the proving system from Aztec, can implement several computations more efficiently using for instance look-up tables. The BlackBoxFuncCall opcode is used to ask the proving system to handle the computation by itself. -All black box functions takes as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output. +These opcodes represent a specific computation. Even if any computation can be done using only assert-zero opcodes, it is not always efficient. Some proving systems, and in particular the proving system from Aztec, can implement several computations more efficiently using for instance look-up tables. The BlackBoxFuncCall opcode is used to ask the proving system to handle the computation by itself. 
+All black box functions take as input a tuple (witness, num_bits), where num_bits is a constant representing the bit size of the input witness, and they have one or several witnesses as output. Some more advanced computations assume that the proving system has an 'embedded curve'. It is a curve that cycle with the main curve of the proving system, i.e the scalar field of the embedded curve is the base field of the main one, and vice-versa. The curves used by the proving system are dependent on the proving system (and/or its configuration). Aztec's Barretenberg uses BN254 as the main curve and Grumpkin as the embedded curve. The black box functions supported by ACIR are: @@ -91,11 +91,11 @@ input: (witness, bit_size) **SHA256**: computes sha256 of the inputs - inputs are a byte array, i.e a vector of (FieldElement, 8) -- output is a byte array of len 32, i.e a vector of 32 (FieldElement, 8), constrainted to be the sha256 of the inputs. +- output is a byte array of len 32, i.e a vector of 32 (FieldElement, 8), constrained to be the sha256 of the inputs. **Blake2s**: computes the Blake2s hash of the inputs, as specified in https://tools.ietf.org/html/rfc7693 - inputs are a byte array, i.e a vector of (FieldElement, 8) -- output is a byte array of length 32, i.e a vector of 32 (FieldElement, 8), constrainted to be the blake2s of the inputs. +- output is a byte array of length 32, i.e a vector of 32 (FieldElement, 8), constrained to be the blake2s of the inputs. **SchnorrVerify**: Verify a Schnorr signature over the embedded curve @@ -114,7 +114,7 @@ The proving system decides how the message is to be hashed. Barretenberg uses Bl - output: 2 witnesses representing the x,y coordinates of the resulting Grumpkin point - domain separator: a constant public value (a field element) that you can use so that the commitment also depends on the domain separator. Noir uses 0 as domain separator. 
-The backend should handle proper conversion between the inputs being ACIR field elements and the scalar field of the embedded curve. In the case of Aztec's Barretenberg, the latter is bigger than the ACIR field so it is straightforward. The Peredersen generators are managed by the proving system. +The backend should handle proper conversion between the inputs being ACIR field elements and the scalar field of the embedded curve. In the case of Aztec's Barretenberg, the latter is bigger than the ACIR field so it is straightforward. The Pedersen generators are managed by the proving system. **PedersenHash**: Computes a Pedersen commitments of the inputs and their number, using generators of the embedded curve @@ -163,7 +163,7 @@ $a=low+high*2^{128},$ with $low, high < 2^{128}$ - verification_key: Vector of (FieldElement, 254) representing the verification key of the circuit being verified - public_inputs: Vector of (FieldElement, 254) representing the public inputs corresponding to the proof being verified - key_hash: one (FieldElement, 254). It should be the hash of the verification key. Barretenberg expects the Pedersen hash of the verification key -- input_aggregation_object: an optional vector of (FieldElement, 254). It is a blob of data specific to the proving sytem. +- input_aggregation_object: an optional vector of (FieldElement, 254). It is a blob of data specific to the proving system. - output_aggregation_object: Some witnesses returned by the function, representing some data internal to the proving system. This black box function does not fully verify a proof, what it does is verifying that the key_hash is indeed a hash of verification_key, allowing the user to use the verification key as private inputs and only have the key_hash as public input, which is more performant. 
@@ -179,18 +179,18 @@ This opcode is used as a hint for the solver when executing (solving) the circui - bytecode: assembly code representing the computation to perform within this opcode. The noir assembly specification is not part of this document. - predicate: an arithmetic expression that disable the opcode when it is null. -Let's see an example with euclidian division. -The normal way to compute a/b, where a and b are 8-bits integers, is to implement Euclid algorithm which computes in a loop (or recursively) modulos of the kind 'a mod b'. Doing this computation requires a lot of steps to be properly implemented in ACIR, especially the loop with a condition. However, euclidian division can be easily constrained with one arithmetic opcode: a = bq+r, assuming q is 8 bits and r); }; + struct Blake3 { + std::vector inputs; + std::vector outputs; + + friend bool operator==(const Blake3&, const Blake3&); + std::vector bincodeSerialize() const; + static Blake3 bincodeDeserialize(std::vector); + }; + struct SchnorrVerify { Circuit::FunctionInput public_key_x; Circuit::FunctionInput public_key_y; @@ -102,15 +111,6 @@ namespace Circuit { static PedersenHash bincodeDeserialize(std::vector); }; - struct HashToField128Security { - std::vector inputs; - Circuit::Witness output; - - friend bool operator==(const HashToField128Security&, const HashToField128Security&); - std::vector bincodeSerialize() const; - static HashToField128Security bincodeDeserialize(std::vector); - }; - struct EcdsaSecp256k1 { std::vector public_key_x; std::vector public_key_y; @@ -145,6 +145,28 @@ namespace Circuit { static FixedBaseScalarMul bincodeDeserialize(std::vector); }; + struct EmbeddedCurveAdd { + Circuit::FunctionInput input1_x; + Circuit::FunctionInput input1_y; + Circuit::FunctionInput input2_x; + Circuit::FunctionInput input2_y; + std::array outputs; + + friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); + std::vector bincodeSerialize() const; + static 
EmbeddedCurveAdd bincodeDeserialize(std::vector); + }; + + struct EmbeddedCurveDouble { + Circuit::FunctionInput input_x; + Circuit::FunctionInput input_y; + std::array outputs; + + friend bool operator==(const EmbeddedCurveDouble&, const EmbeddedCurveDouble&); + std::vector bincodeSerialize() const; + static EmbeddedCurveDouble bincodeDeserialize(std::vector); + }; + struct Keccak256 { std::vector inputs; std::vector outputs; @@ -164,20 +186,27 @@ namespace Circuit { static Keccak256VariableLength bincodeDeserialize(std::vector); }; + struct Keccakf1600 { + std::vector inputs; + std::vector outputs; + + friend bool operator==(const Keccakf1600&, const Keccakf1600&); + std::vector bincodeSerialize() const; + static Keccakf1600 bincodeDeserialize(std::vector); + }; + struct RecursiveAggregation { std::vector verification_key; std::vector proof; std::vector public_inputs; Circuit::FunctionInput key_hash; - std::optional> input_aggregation_object; - std::vector output_aggregation_object; friend bool operator==(const RecursiveAggregation&, const RecursiveAggregation&); std::vector bincodeSerialize() const; static RecursiveAggregation bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const BlackBoxFuncCall&, const BlackBoxFuncCall&); std::vector bincodeSerialize() const; @@ -353,6 +382,40 @@ namespace Circuit { static BinaryIntOp bincodeDeserialize(std::vector); }; + struct HeapValueType; + + struct HeapValueType { + + struct Simple { + friend bool operator==(const Simple&, const Simple&); + std::vector bincodeSerialize() const; + static Simple bincodeDeserialize(std::vector); + }; + + struct Array { + std::vector value_types; + uint64_t size; + + friend bool operator==(const Array&, const Array&); + std::vector bincodeSerialize() const; + static Array bincodeDeserialize(std::vector); + }; + + struct Vector { + std::vector value_types; + + friend bool operator==(const Vector&, const Vector&); + std::vector 
bincodeSerialize() const; + static Vector bincodeDeserialize(std::vector); + }; + + std::variant value; + + friend bool operator==(const HeapValueType&, const HeapValueType&); + std::vector bincodeSerialize() const; + static HeapValueType bincodeDeserialize(std::vector); + }; + struct RegisterIndex { uint64_t value; @@ -364,6 +427,7 @@ namespace Circuit { struct HeapArray { Circuit::RegisterIndex pointer; uint64_t size; + std::vector value_types; friend bool operator==(const HeapArray&, const HeapArray&); std::vector bincodeSerialize() const; @@ -373,6 +437,7 @@ namespace Circuit { struct HeapVector { Circuit::RegisterIndex pointer; Circuit::RegisterIndex size; + std::vector value_types; friend bool operator==(const HeapVector&, const HeapVector&); std::vector bincodeSerialize() const; @@ -399,6 +464,15 @@ namespace Circuit { static Blake2s bincodeDeserialize(std::vector); }; + struct Blake3 { + Circuit::HeapVector message; + Circuit::HeapArray output; + + friend bool operator==(const Blake3&, const Blake3&); + std::vector bincodeSerialize() const; + static Blake3 bincodeDeserialize(std::vector); + }; + struct Keccak256 { Circuit::HeapVector message; Circuit::HeapArray output; @@ -408,13 +482,13 @@ namespace Circuit { static Keccak256 bincodeDeserialize(std::vector); }; - struct HashToField128Security { + struct Keccakf1600 { Circuit::HeapVector message; - Circuit::RegisterIndex output; + Circuit::HeapArray output; - friend bool operator==(const HashToField128Security&, const HashToField128Security&); + friend bool operator==(const Keccakf1600&, const Keccakf1600&); std::vector bincodeSerialize() const; - static HashToField128Security bincodeDeserialize(std::vector); + static Keccakf1600 bincodeDeserialize(std::vector); }; struct EcdsaSecp256k1 { @@ -483,7 +557,29 @@ namespace Circuit { static FixedBaseScalarMul bincodeDeserialize(std::vector); }; - std::variant value; + struct EmbeddedCurveAdd { + Circuit::RegisterIndex input1_x; + Circuit::RegisterIndex input1_y; 
+ Circuit::RegisterIndex input2_x; + Circuit::RegisterIndex input2_y; + Circuit::HeapArray result; + + friend bool operator==(const EmbeddedCurveAdd&, const EmbeddedCurveAdd&); + std::vector bincodeSerialize() const; + static EmbeddedCurveAdd bincodeDeserialize(std::vector); + }; + + struct EmbeddedCurveDouble { + Circuit::RegisterIndex input1_x; + Circuit::RegisterIndex input1_y; + Circuit::HeapArray result; + + friend bool operator==(const EmbeddedCurveDouble&, const EmbeddedCurveDouble&); + std::vector bincodeSerialize() const; + static EmbeddedCurveDouble bincodeDeserialize(std::vector); + }; + + std::variant value; friend bool operator==(const BlackBoxOp&, const BlackBoxOp&); std::vector bincodeSerialize() const; @@ -705,28 +801,8 @@ namespace Circuit { static Brillig bincodeDeserialize(std::vector); }; - struct QuotientDirective { - Circuit::Expression a; - Circuit::Expression b; - Circuit::Witness q; - Circuit::Witness r; - std::optional predicate; - - friend bool operator==(const QuotientDirective&, const QuotientDirective&); - std::vector bincodeSerialize() const; - static QuotientDirective bincodeDeserialize(std::vector); - }; - struct Directive { - struct Quotient { - Circuit::QuotientDirective value; - - friend bool operator==(const Quotient&, const Quotient&); - std::vector bincodeSerialize() const; - static Quotient bincodeDeserialize(std::vector); - }; - struct ToLeRadix { Circuit::Expression a; std::vector b; @@ -748,7 +824,7 @@ namespace Circuit { static PermutationSort bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const Directive&, const Directive&); std::vector bincodeSerialize() const; @@ -767,12 +843,12 @@ namespace Circuit { struct Opcode { - struct Arithmetic { + struct AssertZero { Circuit::Expression value; - friend bool operator==(const Arithmetic&, const Arithmetic&); + friend bool operator==(const AssertZero&, const AssertZero&); std::vector bincodeSerialize() const; - static 
Arithmetic bincodeDeserialize(std::vector); + static AssertZero bincodeDeserialize(std::vector); }; struct BlackBoxFuncCall { @@ -818,7 +894,7 @@ namespace Circuit { static MemoryInit bincodeDeserialize(std::vector); }; - std::variant value; + std::variant value; friend bool operator==(const Opcode&, const Opcode&); std::vector bincodeSerialize() const; @@ -1839,6 +1915,47 @@ Circuit::BlackBoxFuncCall::Blake2s serde::Deserializable BlackBoxFuncCall::Blake3::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::Blake3 BlackBoxFuncCall::Blake3::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::Blake3 &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Circuit::BlackBoxFuncCall::Blake3 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxFuncCall::Blake3 obj; + obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const BlackBoxFuncCall::SchnorrVerify &lhs, const BlackBoxFuncCall::SchnorrVerify &rhs) { @@ -1977,47 +2094,6 @@ Circuit::BlackBoxFuncCall::PedersenHash serde::Deserializable BlackBoxFuncCall::HashToField128Security::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, 
serializer); - return std::move(serializer).bytes(); - } - - inline BlackBoxFuncCall::HashToField128Security BlackBoxFuncCall::HashToField128Security::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Circuit - -template <> -template -void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::HashToField128Security &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.inputs, serializer); - serde::Serializable::serialize(obj.output, serializer); -} - -template <> -template -Circuit::BlackBoxFuncCall::HashToField128Security serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::BlackBoxFuncCall::HashToField128Security obj; - obj.inputs = serde::Deserializable::deserialize(deserializer); - obj.output = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Circuit { inline bool operator==(const BlackBoxFuncCall::EcdsaSecp256k1 &lhs, const BlackBoxFuncCall::EcdsaSecp256k1 &rhs) { @@ -2162,6 +2238,100 @@ Circuit::BlackBoxFuncCall::FixedBaseScalarMul serde::Deserializable BlackBoxFuncCall::EmbeddedCurveAdd::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::EmbeddedCurveAdd BlackBoxFuncCall::EmbeddedCurveAdd::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> 
+template +void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::EmbeddedCurveAdd &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.input1_x, serializer); + serde::Serializable::serialize(obj.input1_y, serializer); + serde::Serializable::serialize(obj.input2_x, serializer); + serde::Serializable::serialize(obj.input2_y, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Circuit::BlackBoxFuncCall::EmbeddedCurveAdd serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxFuncCall::EmbeddedCurveAdd obj; + obj.input1_x = serde::Deserializable::deserialize(deserializer); + obj.input1_y = serde::Deserializable::deserialize(deserializer); + obj.input2_x = serde::Deserializable::deserialize(deserializer); + obj.input2_y = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Circuit { + + inline bool operator==(const BlackBoxFuncCall::EmbeddedCurveDouble &lhs, const BlackBoxFuncCall::EmbeddedCurveDouble &rhs) { + if (!(lhs.input_x == rhs.input_x)) { return false; } + if (!(lhs.input_y == rhs.input_y)) { return false; } + if (!(lhs.outputs == rhs.outputs)) { return false; } + return true; + } + + inline std::vector BlackBoxFuncCall::EmbeddedCurveDouble::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::EmbeddedCurveDouble BlackBoxFuncCall::EmbeddedCurveDouble::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template 
+void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::EmbeddedCurveDouble &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.input_x, serializer); + serde::Serializable::serialize(obj.input_y, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Circuit::BlackBoxFuncCall::EmbeddedCurveDouble serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxFuncCall::EmbeddedCurveDouble obj; + obj.input_x = serde::Deserializable::deserialize(deserializer); + obj.input_y = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const BlackBoxFuncCall::Keccak256 &lhs, const BlackBoxFuncCall::Keccak256 &rhs) { @@ -2247,6 +2417,47 @@ Circuit::BlackBoxFuncCall::Keccak256VariableLength serde::Deserializable BlackBoxFuncCall::Keccakf1600::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxFuncCall::Keccakf1600 BlackBoxFuncCall::Keccakf1600::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxFuncCall::Keccakf1600 &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.inputs, serializer); + serde::Serializable::serialize(obj.outputs, serializer); +} + +template <> +template +Circuit::BlackBoxFuncCall::Keccakf1600 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxFuncCall::Keccakf1600 obj; + 
obj.inputs = serde::Deserializable::deserialize(deserializer); + obj.outputs = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const BlackBoxFuncCall::RecursiveAggregation &lhs, const BlackBoxFuncCall::RecursiveAggregation &rhs) { @@ -2254,8 +2465,6 @@ namespace Circuit { if (!(lhs.proof == rhs.proof)) { return false; } if (!(lhs.public_inputs == rhs.public_inputs)) { return false; } if (!(lhs.key_hash == rhs.key_hash)) { return false; } - if (!(lhs.input_aggregation_object == rhs.input_aggregation_object)) { return false; } - if (!(lhs.output_aggregation_object == rhs.output_aggregation_object)) { return false; } return true; } @@ -2283,8 +2492,6 @@ void serde::Serializable::seria serde::Serializable::serialize(obj.proof, serializer); serde::Serializable::serialize(obj.public_inputs, serializer); serde::Serializable::serialize(obj.key_hash, serializer); - serde::Serializable::serialize(obj.input_aggregation_object, serializer); - serde::Serializable::serialize(obj.output_aggregation_object, serializer); } template <> @@ -2295,8 +2502,6 @@ Circuit::BlackBoxFuncCall::RecursiveAggregation serde::Deserializable::deserialize(deserializer); obj.public_inputs = serde::Deserializable::deserialize(deserializer); obj.key_hash = serde::Deserializable::deserialize(deserializer); - obj.input_aggregation_object = serde::Deserializable::deserialize(deserializer); - obj.output_aggregation_object = serde::Deserializable::deserialize(deserializer); return obj; } @@ -2426,21 +2631,21 @@ Circuit::BlackBoxOp::Blake2s serde::Deserializable namespace Circuit { - inline bool operator==(const BlackBoxOp::Keccak256 &lhs, const BlackBoxOp::Keccak256 &rhs) { + inline bool operator==(const BlackBoxOp::Blake3 &lhs, const BlackBoxOp::Blake3 &rhs) { if (!(lhs.message == rhs.message)) { return false; } if (!(lhs.output == rhs.output)) { return false; } return true; } - inline std::vector BlackBoxOp::Keccak256::bincodeSerialize() 
const { + inline std::vector BlackBoxOp::Blake3::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline BlackBoxOp::Keccak256 BlackBoxOp::Keccak256::bincodeDeserialize(std::vector input) { + inline BlackBoxOp::Blake3 BlackBoxOp::Blake3::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -2451,15 +2656,15 @@ namespace Circuit { template <> template -void serde::Serializable::serialize(const Circuit::BlackBoxOp::Keccak256 &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Circuit::BlackBoxOp::Blake3 &obj, Serializer &serializer) { serde::Serializable::serialize(obj.message, serializer); serde::Serializable::serialize(obj.output, serializer); } template <> template -Circuit::BlackBoxOp::Keccak256 serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::BlackBoxOp::Keccak256 obj; +Circuit::BlackBoxOp::Blake3 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxOp::Blake3 obj; obj.message = serde::Deserializable::deserialize(deserializer); obj.output = serde::Deserializable::deserialize(deserializer); return obj; @@ -2467,21 +2672,21 @@ Circuit::BlackBoxOp::Keccak256 serde::Deserializable BlackBoxOp::HashToField128Security::bincodeSerialize() const { + inline std::vector BlackBoxOp::Keccak256::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline 
BlackBoxOp::HashToField128Security BlackBoxOp::HashToField128Security::bincodeDeserialize(std::vector input) { + inline BlackBoxOp::Keccak256 BlackBoxOp::Keccak256::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -2492,15 +2697,15 @@ namespace Circuit { template <> template -void serde::Serializable::serialize(const Circuit::BlackBoxOp::HashToField128Security &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Circuit::BlackBoxOp::Keccak256 &obj, Serializer &serializer) { serde::Serializable::serialize(obj.message, serializer); serde::Serializable::serialize(obj.output, serializer); } template <> template -Circuit::BlackBoxOp::HashToField128Security serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::BlackBoxOp::HashToField128Security obj; +Circuit::BlackBoxOp::Keccak256 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxOp::Keccak256 obj; obj.message = serde::Deserializable::deserialize(deserializer); obj.output = serde::Deserializable::deserialize(deserializer); return obj; @@ -2508,9 +2713,50 @@ Circuit::BlackBoxOp::HashToField128Security serde::Deserializable BlackBoxOp::Keccakf1600::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::Keccakf1600 BlackBoxOp::Keccakf1600::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes 
were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxOp::Keccakf1600 &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.message, serializer); + serde::Serializable::serialize(obj.output, serializer); +} + +template <> +template +Circuit::BlackBoxOp::Keccakf1600 serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxOp::Keccakf1600 obj; + obj.message = serde::Deserializable::deserialize(deserializer); + obj.output = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Circuit { + + inline bool operator==(const BlackBoxOp::EcdsaSecp256k1 &lhs, const BlackBoxOp::EcdsaSecp256k1 &rhs) { + if (!(lhs.hashed_msg == rhs.hashed_msg)) { return false; } + if (!(lhs.public_key_x == rhs.public_key_x)) { return false; } if (!(lhs.public_key_y == rhs.public_key_y)) { return false; } if (!(lhs.signature == rhs.signature)) { return false; } if (!(lhs.result == rhs.result)) { return false; } @@ -2788,6 +3034,100 @@ Circuit::BlackBoxOp::FixedBaseScalarMul serde::Deserializable BlackBoxOp::EmbeddedCurveAdd::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::EmbeddedCurveAdd BlackBoxOp::EmbeddedCurveAdd::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxOp::EmbeddedCurveAdd &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.input1_x, serializer); + 
serde::Serializable::serialize(obj.input1_y, serializer); + serde::Serializable::serialize(obj.input2_x, serializer); + serde::Serializable::serialize(obj.input2_y, serializer); + serde::Serializable::serialize(obj.result, serializer); +} + +template <> +template +Circuit::BlackBoxOp::EmbeddedCurveAdd serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxOp::EmbeddedCurveAdd obj; + obj.input1_x = serde::Deserializable::deserialize(deserializer); + obj.input1_y = serde::Deserializable::deserialize(deserializer); + obj.input2_x = serde::Deserializable::deserialize(deserializer); + obj.input2_y = serde::Deserializable::deserialize(deserializer); + obj.result = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Circuit { + + inline bool operator==(const BlackBoxOp::EmbeddedCurveDouble &lhs, const BlackBoxOp::EmbeddedCurveDouble &rhs) { + if (!(lhs.input1_x == rhs.input1_x)) { return false; } + if (!(lhs.input1_y == rhs.input1_y)) { return false; } + if (!(lhs.result == rhs.result)) { return false; } + return true; + } + + inline std::vector BlackBoxOp::EmbeddedCurveDouble::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline BlackBoxOp::EmbeddedCurveDouble BlackBoxOp::EmbeddedCurveDouble::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::BlackBoxOp::EmbeddedCurveDouble &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.input1_x, serializer); + serde::Serializable::serialize(obj.input1_y, 
serializer); + serde::Serializable::serialize(obj.result, serializer); +} + +template <> +template +Circuit::BlackBoxOp::EmbeddedCurveDouble serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::BlackBoxOp::EmbeddedCurveDouble obj; + obj.input1_x = serde::Deserializable::deserialize(deserializer); + obj.input1_y = serde::Deserializable::deserialize(deserializer); + obj.result = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const BlockId &lhs, const BlockId &rhs) { @@ -3864,44 +4204,6 @@ Circuit::Directive serde::Deserializable::deserialize(Deseri return obj; } -namespace Circuit { - - inline bool operator==(const Directive::Quotient &lhs, const Directive::Quotient &rhs) { - if (!(lhs.value == rhs.value)) { return false; } - return true; - } - - inline std::vector Directive::Quotient::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline Directive::Quotient Directive::Quotient::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace Circuit - -template <> -template -void serde::Serializable::serialize(const Circuit::Directive::Quotient &obj, Serializer &serializer) { - serde::Serializable::serialize(obj.value, serializer); -} - -template <> -template -Circuit::Directive::Quotient serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::Directive::Quotient obj; - obj.value = serde::Deserializable::deserialize(deserializer); - return obj; -} - namespace Circuit { inline bool operator==(const Directive::ToLeRadix &lhs, const Directive::ToLeRadix &rhs) { @@ 
-4091,6 +4393,7 @@ namespace Circuit { inline bool operator==(const HeapArray &lhs, const HeapArray &rhs) { if (!(lhs.pointer == rhs.pointer)) { return false; } if (!(lhs.size == rhs.size)) { return false; } + if (!(lhs.value_types == rhs.value_types)) { return false; } return true; } @@ -4117,6 +4420,7 @@ void serde::Serializable::serialize(const Circuit::HeapArray serializer.increase_container_depth(); serde::Serializable::serialize(obj.pointer, serializer); serde::Serializable::serialize(obj.size, serializer); + serde::Serializable::serialize(obj.value_types, serializer); serializer.decrease_container_depth(); } @@ -4127,15 +4431,173 @@ Circuit::HeapArray serde::Deserializable::deserialize(Deseri Circuit::HeapArray obj; obj.pointer = serde::Deserializable::deserialize(deserializer); obj.size = serde::Deserializable::deserialize(deserializer); + obj.value_types = serde::Deserializable::deserialize(deserializer); + deserializer.decrease_container_depth(); + return obj; +} + +namespace Circuit { + + inline bool operator==(const HeapValueType &lhs, const HeapValueType &rhs) { + if (!(lhs.value == rhs.value)) { return false; } + return true; + } + + inline std::vector HeapValueType::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline HeapValueType HeapValueType::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::HeapValueType &obj, Serializer &serializer) { + serializer.increase_container_depth(); + serde::Serializable::serialize(obj.value, serializer); + 
serializer.decrease_container_depth(); +} + +template <> +template +Circuit::HeapValueType serde::Deserializable::deserialize(Deserializer &deserializer) { + deserializer.increase_container_depth(); + Circuit::HeapValueType obj; + obj.value = serde::Deserializable::deserialize(deserializer); deserializer.decrease_container_depth(); return obj; } +namespace Circuit { + + inline bool operator==(const HeapValueType::Simple &lhs, const HeapValueType::Simple &rhs) { + return true; + } + + inline std::vector HeapValueType::Simple::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline HeapValueType::Simple HeapValueType::Simple::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::HeapValueType::Simple &obj, Serializer &serializer) { +} + +template <> +template +Circuit::HeapValueType::Simple serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::HeapValueType::Simple obj; + return obj; +} + +namespace Circuit { + + inline bool operator==(const HeapValueType::Array &lhs, const HeapValueType::Array &rhs) { + if (!(lhs.value_types == rhs.value_types)) { return false; } + if (!(lhs.size == rhs.size)) { return false; } + return true; + } + + inline std::vector HeapValueType::Array::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline HeapValueType::Array HeapValueType::Array::bincodeDeserialize(std::vector input) { + auto deserializer = 
serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::HeapValueType::Array &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.value_types, serializer); + serde::Serializable::serialize(obj.size, serializer); +} + +template <> +template +Circuit::HeapValueType::Array serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::HeapValueType::Array obj; + obj.value_types = serde::Deserializable::deserialize(deserializer); + obj.size = serde::Deserializable::deserialize(deserializer); + return obj; +} + +namespace Circuit { + + inline bool operator==(const HeapValueType::Vector &lhs, const HeapValueType::Vector &rhs) { + if (!(lhs.value_types == rhs.value_types)) { return false; } + return true; + } + + inline std::vector HeapValueType::Vector::bincodeSerialize() const { + auto serializer = serde::BincodeSerializer(); + serde::Serializable::serialize(*this, serializer); + return std::move(serializer).bytes(); + } + + inline HeapValueType::Vector HeapValueType::Vector::bincodeDeserialize(std::vector input) { + auto deserializer = serde::BincodeDeserializer(input); + auto value = serde::Deserializable::deserialize(deserializer); + if (deserializer.get_buffer_offset() < input.size()) { + throw serde::deserialization_error("Some input bytes were not read"); + } + return value; + } + +} // end of namespace Circuit + +template <> +template +void serde::Serializable::serialize(const Circuit::HeapValueType::Vector &obj, Serializer &serializer) { + serde::Serializable::serialize(obj.value_types, serializer); +} + +template <> +template +Circuit::HeapValueType::Vector serde::Deserializable::deserialize(Deserializer &deserializer) { + 
Circuit::HeapValueType::Vector obj; + obj.value_types = serde::Deserializable::deserialize(deserializer); + return obj; +} + namespace Circuit { inline bool operator==(const HeapVector &lhs, const HeapVector &rhs) { if (!(lhs.pointer == rhs.pointer)) { return false; } if (!(lhs.size == rhs.size)) { return false; } + if (!(lhs.value_types == rhs.value_types)) { return false; } return true; } @@ -4162,6 +4624,7 @@ void serde::Serializable::serialize(const Circuit::HeapVect serializer.increase_container_depth(); serde::Serializable::serialize(obj.pointer, serializer); serde::Serializable::serialize(obj.size, serializer); + serde::Serializable::serialize(obj.value_types, serializer); serializer.decrease_container_depth(); } @@ -4172,6 +4635,7 @@ Circuit::HeapVector serde::Deserializable::deserialize(Dese Circuit::HeapVector obj; obj.pointer = serde::Deserializable::deserialize(deserializer); obj.size = serde::Deserializable::deserialize(deserializer); + obj.value_types = serde::Deserializable::deserialize(deserializer); deserializer.decrease_container_depth(); return obj; } @@ -4268,20 +4732,20 @@ Circuit::Opcode serde::Deserializable::deserialize(Deserializer namespace Circuit { - inline bool operator==(const Opcode::Arithmetic &lhs, const Opcode::Arithmetic &rhs) { + inline bool operator==(const Opcode::AssertZero &lhs, const Opcode::AssertZero &rhs) { if (!(lhs.value == rhs.value)) { return false; } return true; } - inline std::vector Opcode::Arithmetic::bincodeSerialize() const { + inline std::vector Opcode::AssertZero::bincodeSerialize() const { auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); + serde::Serializable::serialize(*this, serializer); return std::move(serializer).bytes(); } - inline Opcode::Arithmetic Opcode::Arithmetic::bincodeDeserialize(std::vector input) { + inline Opcode::AssertZero Opcode::AssertZero::bincodeDeserialize(std::vector input) { auto deserializer = serde::BincodeDeserializer(input); - 
auto value = serde::Deserializable::deserialize(deserializer); + auto value = serde::Deserializable::deserialize(deserializer); if (deserializer.get_buffer_offset() < input.size()) { throw serde::deserialization_error("Some input bytes were not read"); } @@ -4292,14 +4756,14 @@ namespace Circuit { template <> template -void serde::Serializable::serialize(const Circuit::Opcode::Arithmetic &obj, Serializer &serializer) { +void serde::Serializable::serialize(const Circuit::Opcode::AssertZero &obj, Serializer &serializer) { serde::Serializable::serialize(obj.value, serializer); } template <> template -Circuit::Opcode::Arithmetic serde::Deserializable::deserialize(Deserializer &deserializer) { - Circuit::Opcode::Arithmetic obj; +Circuit::Opcode::AssertZero serde::Deserializable::deserialize(Deserializer &deserializer) { + Circuit::Opcode::AssertZero obj; obj.value = serde::Deserializable::deserialize(deserializer); return obj; } @@ -4666,60 +5130,6 @@ Circuit::PublicInputs serde::Deserializable::deserialize( return obj; } -namespace Circuit { - - inline bool operator==(const QuotientDirective &lhs, const QuotientDirective &rhs) { - if (!(lhs.a == rhs.a)) { return false; } - if (!(lhs.b == rhs.b)) { return false; } - if (!(lhs.q == rhs.q)) { return false; } - if (!(lhs.r == rhs.r)) { return false; } - if (!(lhs.predicate == rhs.predicate)) { return false; } - return true; - } - - inline std::vector QuotientDirective::bincodeSerialize() const { - auto serializer = serde::BincodeSerializer(); - serde::Serializable::serialize(*this, serializer); - return std::move(serializer).bytes(); - } - - inline QuotientDirective QuotientDirective::bincodeDeserialize(std::vector input) { - auto deserializer = serde::BincodeDeserializer(input); - auto value = serde::Deserializable::deserialize(deserializer); - if (deserializer.get_buffer_offset() < input.size()) { - throw serde::deserialization_error("Some input bytes were not read"); - } - return value; - } - -} // end of namespace 
Circuit - -template <> -template -void serde::Serializable::serialize(const Circuit::QuotientDirective &obj, Serializer &serializer) { - serializer.increase_container_depth(); - serde::Serializable::serialize(obj.a, serializer); - serde::Serializable::serialize(obj.b, serializer); - serde::Serializable::serialize(obj.q, serializer); - serde::Serializable::serialize(obj.r, serializer); - serde::Serializable::serialize(obj.predicate, serializer); - serializer.decrease_container_depth(); -} - -template <> -template -Circuit::QuotientDirective serde::Deserializable::deserialize(Deserializer &deserializer) { - deserializer.increase_container_depth(); - Circuit::QuotientDirective obj; - obj.a = serde::Deserializable::deserialize(deserializer); - obj.b = serde::Deserializable::deserialize(deserializer); - obj.q = serde::Deserializable::deserialize(deserializer); - obj.r = serde::Deserializable::deserialize(deserializer); - obj.predicate = serde::Deserializable::deserialize(deserializer); - deserializer.decrease_container_depth(); - return obj; -} - namespace Circuit { inline bool operator==(const RegisterIndex &lhs, const RegisterIndex &rhs) { diff --git a/acvm-repo/acir/src/circuit/black_box_functions.rs b/acvm-repo/acir/src/circuit/black_box_functions.rs index 9129f44008c..d1f5560313b 100644 --- a/acvm-repo/acir/src/circuit/black_box_functions.rs +++ b/acvm-repo/acir/src/circuit/black_box_functions.rs @@ -1,8 +1,5 @@ //! Black box functions are ACIR opcodes which rely on backends implementing support for specialized constraints. //! This makes certain zk-snark unfriendly computations cheaper than if they were implemented in more basic constraints. -//! -//! It is possible to fallback to less efficient implementations written in ACIR in some cases. -//! These are implemented inside the ACVM stdlib. use serde::{Deserialize, Serialize}; #[cfg(test)] @@ -22,6 +19,8 @@ pub enum BlackBoxFunc { SHA256, /// Calculates the Blake2s hash of the inputs. 
Blake2s, + /// Calculates the Blake3 hash of the inputs. + Blake3, /// Verifies a Schnorr signature over a curve which is "pairing friendly" with the curve on which the ACIR circuit is defined. /// /// The exact curve which this signature uses will vary based on the curve being used by ACIR. @@ -33,12 +32,6 @@ pub enum BlackBoxFunc { PedersenCommitment, /// Calculates a Pedersen hash to the inputs. PedersenHash, - /// Hashes a set of inputs and applies the field modulus to the result - /// to return a value which can be represented as a [`FieldElement`][acir_field::FieldElement] - /// - /// This is implemented using the `Blake2s` hash function. - /// The "128" in the name specifies that this function should have 128 bits of security. - HashToField128Security, /// Verifies a ECDSA signature over the secp256k1 curve. EcdsaSecp256k1, /// Verifies a ECDSA signature over the secp256r1 curve. @@ -47,9 +40,15 @@ pub enum BlackBoxFunc { FixedBaseScalarMul, /// Calculates the Keccak256 hash of the inputs. Keccak256, + /// Keccak Permutation function of 1600 width + Keccakf1600, /// Compute a recursive aggregation object when verifying a proof inside another circuit. /// This outputted aggregation object will then be either checked in a top-level verifier or aggregated upon again. RecursiveAggregation, + /// Addition over the embedded curve on which [`FieldElement`][acir_field::FieldElement] is defined. + EmbeddedCurveAdd, + /// Point doubling over the embedded curve on which [`FieldElement`][acir_field::FieldElement] is defined. 
+ EmbeddedCurveDouble, } impl std::fmt::Display for BlackBoxFunc { @@ -64,15 +63,18 @@ impl BlackBoxFunc { BlackBoxFunc::SHA256 => "sha256", BlackBoxFunc::SchnorrVerify => "schnorr_verify", BlackBoxFunc::Blake2s => "blake2s", - BlackBoxFunc::PedersenCommitment => "pedersen", + BlackBoxFunc::Blake3 => "blake3", + BlackBoxFunc::PedersenCommitment => "pedersen_commitment", BlackBoxFunc::PedersenHash => "pedersen_hash", - BlackBoxFunc::HashToField128Security => "hash_to_field_128_security", BlackBoxFunc::EcdsaSecp256k1 => "ecdsa_secp256k1", BlackBoxFunc::FixedBaseScalarMul => "fixed_base_scalar_mul", + BlackBoxFunc::EmbeddedCurveAdd => "ec_add", + BlackBoxFunc::EmbeddedCurveDouble => "ec_double", BlackBoxFunc::AND => "and", BlackBoxFunc::XOR => "xor", BlackBoxFunc::RANGE => "range", BlackBoxFunc::Keccak256 => "keccak256", + BlackBoxFunc::Keccakf1600 => "keccakf1600", BlackBoxFunc::RecursiveAggregation => "recursive_aggregation", BlackBoxFunc::EcdsaSecp256r1 => "ecdsa_secp256r1", } @@ -82,16 +84,19 @@ impl BlackBoxFunc { "sha256" => Some(BlackBoxFunc::SHA256), "schnorr_verify" => Some(BlackBoxFunc::SchnorrVerify), "blake2s" => Some(BlackBoxFunc::Blake2s), - "pedersen" => Some(BlackBoxFunc::PedersenCommitment), + "blake3" => Some(BlackBoxFunc::Blake3), + "pedersen_commitment" => Some(BlackBoxFunc::PedersenCommitment), "pedersen_hash" => Some(BlackBoxFunc::PedersenHash), - "hash_to_field_128_security" => Some(BlackBoxFunc::HashToField128Security), "ecdsa_secp256k1" => Some(BlackBoxFunc::EcdsaSecp256k1), "ecdsa_secp256r1" => Some(BlackBoxFunc::EcdsaSecp256r1), "fixed_base_scalar_mul" => Some(BlackBoxFunc::FixedBaseScalarMul), + "ec_add" => Some(BlackBoxFunc::EmbeddedCurveAdd), + "ec_double" => Some(BlackBoxFunc::EmbeddedCurveDouble), "and" => Some(BlackBoxFunc::AND), "xor" => Some(BlackBoxFunc::XOR), "range" => Some(BlackBoxFunc::RANGE), "keccak256" => Some(BlackBoxFunc::Keccak256), + "keccakf1600" => Some(BlackBoxFunc::Keccakf1600), "recursive_aggregation" => 
Some(BlackBoxFunc::RecursiveAggregation), _ => None, } diff --git a/acvm-repo/acir/src/circuit/directives.rs b/acvm-repo/acir/src/circuit/directives.rs index a86eb525c1f..2486f4cfb83 100644 --- a/acvm-repo/acir/src/circuit/directives.rs +++ b/acvm-repo/acir/src/circuit/directives.rs @@ -1,23 +1,11 @@ use crate::native_types::{Expression, Witness}; use serde::{Deserialize, Serialize}; -#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] -pub struct QuotientDirective { - pub a: Expression, - pub b: Expression, - pub q: Witness, - pub r: Witness, - pub predicate: Option, -} - #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] /// Directives do not apply any constraints. /// You can think of them as opcodes that allow one to use non-determinism /// In the future, this can be replaced with asm non-determinism blocks pub enum Directive { - //Performs euclidean division of a / b (as integers) and stores the quotient in q and the rest in r - Quotient(QuotientDirective), - //decomposition of a: a=\sum b[i]*radix^i where b is an array of witnesses < radix in little endian form ToLeRadix { a: Expression, @@ -34,13 +22,3 @@ pub enum Directive { sort_by: Vec, // specify primary index to sort by, then the secondary,... For instance, if tuple is 2 and sort_by is [1,0], then a=[(a0,b0),..] is sorted by bi and then ai. }, } - -impl Directive { - pub fn name(&self) -> &str { - match self { - Directive::Quotient(_) => "quotient", - Directive::ToLeRadix { .. } => "to_le_radix", - Directive::PermutationSort { .. 
} => "permutation_sort", - } - } -} diff --git a/acvm-repo/acir/src/circuit/mod.rs b/acvm-repo/acir/src/circuit/mod.rs index 99ab389e31e..b248b30b1d9 100644 --- a/acvm-repo/acir/src/circuit/mod.rs +++ b/acvm-repo/acir/src/circuit/mod.rs @@ -99,7 +99,7 @@ impl FromStr for OpcodeLocation { let brillig_index = parts[1].parse()?; Ok(OpcodeLocation::Brillig { acir_index, brillig_index }) } - _ => unreachable!(), + _ => unreachable!("`OpcodeLocation` has too many components"), } } @@ -250,6 +250,64 @@ mod tests { input: FunctionInput { witness: Witness(1), num_bits: 8 }, }) } + fn keccakf1600_opcode() -> Opcode { + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccakf1600 { + inputs: vec![ + FunctionInput { witness: Witness(1), num_bits: 64 }, + FunctionInput { witness: Witness(2), num_bits: 64 }, + FunctionInput { witness: Witness(3), num_bits: 64 }, + FunctionInput { witness: Witness(4), num_bits: 64 }, + FunctionInput { witness: Witness(5), num_bits: 64 }, + FunctionInput { witness: Witness(6), num_bits: 64 }, + FunctionInput { witness: Witness(7), num_bits: 64 }, + FunctionInput { witness: Witness(8), num_bits: 64 }, + FunctionInput { witness: Witness(9), num_bits: 64 }, + FunctionInput { witness: Witness(10), num_bits: 64 }, + FunctionInput { witness: Witness(11), num_bits: 64 }, + FunctionInput { witness: Witness(12), num_bits: 64 }, + FunctionInput { witness: Witness(13), num_bits: 64 }, + FunctionInput { witness: Witness(14), num_bits: 64 }, + FunctionInput { witness: Witness(15), num_bits: 64 }, + FunctionInput { witness: Witness(16), num_bits: 64 }, + FunctionInput { witness: Witness(17), num_bits: 64 }, + FunctionInput { witness: Witness(18), num_bits: 64 }, + FunctionInput { witness: Witness(19), num_bits: 64 }, + FunctionInput { witness: Witness(20), num_bits: 64 }, + FunctionInput { witness: Witness(21), num_bits: 64 }, + FunctionInput { witness: Witness(22), num_bits: 64 }, + FunctionInput { witness: Witness(23), num_bits: 64 }, + FunctionInput { witness: 
Witness(24), num_bits: 64 }, + FunctionInput { witness: Witness(25), num_bits: 64 }, + ], + outputs: vec![ + Witness(26), + Witness(27), + Witness(28), + Witness(29), + Witness(30), + Witness(31), + Witness(32), + Witness(33), + Witness(34), + Witness(35), + Witness(36), + Witness(37), + Witness(38), + Witness(39), + Witness(40), + Witness(41), + Witness(42), + Witness(43), + Witness(44), + Witness(45), + Witness(46), + Witness(47), + Witness(48), + Witness(49), + Witness(50), + ], + }) + } #[test] fn serialization_roundtrip() { @@ -277,13 +335,14 @@ mod tests { let circuit = Circuit { current_witness_index: 0, opcodes: vec![ - Opcode::Arithmetic(crate::native_types::Expression { + Opcode::AssertZero(crate::native_types::Expression { mul_terms: vec![], linear_combinations: vec![], q_c: FieldElement::from(8u128), }), range_opcode(), and_opcode(), + keccakf1600_opcode(), ], private_parameters: BTreeSet::new(), public_parameters: PublicInputs(BTreeSet::from_iter(vec![Witness(2)])), diff --git a/acvm-repo/acir/src/circuit/opcodes.rs b/acvm-repo/acir/src/circuit/opcodes.rs index dc7f73b47e5..5aab9d4d472 100644 --- a/acvm-repo/acir/src/circuit/opcodes.rs +++ b/acvm-repo/acir/src/circuit/opcodes.rs @@ -1,7 +1,4 @@ -use super::{ - brillig::Brillig, - directives::{Directive, QuotientDirective}, -}; +use super::{brillig::Brillig, directives::Directive}; use crate::native_types::{Expression, Witness}; use serde::{Deserialize, Serialize}; @@ -13,7 +10,7 @@ pub use memory_operation::{BlockId, MemOp}; #[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] pub enum Opcode { - Arithmetic(Expression), + AssertZero(Expression), /// Calls to "gadgets" which rely on backends implementing support for specialized constraints. /// /// Often used for exposing more efficient implementations of SNARK-unfriendly computations. 
@@ -33,62 +30,10 @@ pub enum Opcode { }, } -#[derive(Clone, PartialEq, Eq, Debug)] -pub enum UnsupportedMemoryOpcode { - MemoryOp, - MemoryInit, -} - -impl std::fmt::Display for UnsupportedMemoryOpcode { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - UnsupportedMemoryOpcode::MemoryOp => write!(f, "MemoryOp"), - UnsupportedMemoryOpcode::MemoryInit => write!(f, "MemoryInit"), - } - } -} - -impl Opcode { - // TODO We can add a domain separator by doing something like: - // TODO concat!("directive:", directive.name) - pub fn name(&self) -> &str { - match self { - Opcode::Arithmetic(_) => "arithmetic", - Opcode::Directive(directive) => directive.name(), - Opcode::BlackBoxFuncCall(g) => g.name(), - Opcode::Brillig(_) => "brillig", - Opcode::MemoryOp { .. } => "mem", - Opcode::MemoryInit { .. } => "init memory block", - } - } - - pub fn unsupported_opcode(&self) -> UnsupportedMemoryOpcode { - match self { - Opcode::MemoryOp { .. } => UnsupportedMemoryOpcode::MemoryOp, - Opcode::MemoryInit { .. 
} => UnsupportedMemoryOpcode::MemoryInit, - Opcode::BlackBoxFuncCall(_) => { - unreachable!("Unsupported Blackbox function should not be reported here") - } - _ => unreachable!("Opcode is supported"), - } - } - - pub fn is_arithmetic(&self) -> bool { - matches!(self, Opcode::Arithmetic(_)) - } - - pub fn arithmetic(self) -> Option { - match self { - Opcode::Arithmetic(expr) => Some(expr), - _ => None, - } - } -} - impl std::fmt::Display for Opcode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Opcode::Arithmetic(expr) => { + Opcode::AssertZero(expr) => { write!(f, "EXPR [ ")?; for i in &expr.mul_terms { write!(f, "({}, _{}, _{}) ", i.0, i.1.witness_index(), i.2.witness_index())?; @@ -100,21 +45,7 @@ impl std::fmt::Display for Opcode { write!(f, " ]") } - Opcode::Directive(Directive::Quotient(QuotientDirective { a, b, q, r, predicate })) => { - write!(f, "DIR::QUOTIENT ")?; - if let Some(pred) = predicate { - writeln!(f, "PREDICATE = {pred}")?; - } - write!( - f, - "(out : _{}, (_{}, {}), _{})", - a, - q.witness_index(), - b, - r.witness_index() - ) - } Opcode::BlackBoxFuncCall(g) => write!(f, "{g}"), Opcode::Directive(Directive::ToLeRadix { a, b, radix: _ }) => { write!(f, "DIR::TORADIX ")?; diff --git a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs index 70821913836..7ee4e2498a5 100644 --- a/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs +++ b/acvm-repo/acir/src/circuit/opcodes/black_box_function_call.rs @@ -39,6 +39,10 @@ pub enum BlackBoxFuncCall { inputs: Vec, outputs: Vec, }, + Blake3 { + inputs: Vec, + outputs: Vec, + }, SchnorrVerify { public_key_x: FunctionInput, public_key_y: FunctionInput, @@ -56,12 +60,6 @@ pub enum BlackBoxFuncCall { domain_separator: u32, output: Witness, }, - // 128 here specifies that this function - // should have 128 bits of security - HashToField128Security { - inputs: Vec, - output: Witness, - }, 
EcdsaSecp256k1 { public_key_x: Vec, public_key_y: Vec, @@ -81,6 +79,18 @@ pub enum BlackBoxFuncCall { high: FunctionInput, outputs: (Witness, Witness), }, + EmbeddedCurveAdd { + input1_x: FunctionInput, + input1_y: FunctionInput, + input2_x: FunctionInput, + input2_y: FunctionInput, + outputs: (Witness, Witness), + }, + EmbeddedCurveDouble { + input_x: FunctionInput, + input_y: FunctionInput, + outputs: (Witness, Witness), + }, Keccak256 { inputs: Vec, outputs: Vec, @@ -94,6 +104,10 @@ pub enum BlackBoxFuncCall { var_message_size: FunctionInput, outputs: Vec, }, + Keccakf1600 { + inputs: Vec, + outputs: Vec, + }, RecursiveAggregation { verification_key: Vec, proof: Vec, @@ -105,17 +119,6 @@ pub enum BlackBoxFuncCall { /// The circuit implementing this opcode can use this hash to ensure that the /// key provided to the circuit matches the key produced by the circuit creator key_hash: FunctionInput, - /// An aggregation object is blob of data that the top-level verifier must run some proof system specific - /// algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. - /// The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in - /// the current circuit. If this is the first recursive aggregation there is no input aggregation object. - /// It is left to the backend to determine how to handle when there is no input aggregation object. - input_aggregation_object: Option>, - /// This is the result of a recursive aggregation and is what will be fed into the next verifier. - /// The next verifier can either perform a final verification (returning true or false) - /// or perform another recursive aggregation where this output aggregation object - /// will be the input aggregation object of the next recursive aggregation. - output_aggregation_object: Vec, }, } @@ -127,15 +130,18 @@ impl BlackBoxFuncCall { BlackBoxFuncCall::RANGE { .. 
} => BlackBoxFunc::RANGE, BlackBoxFuncCall::SHA256 { .. } => BlackBoxFunc::SHA256, BlackBoxFuncCall::Blake2s { .. } => BlackBoxFunc::Blake2s, + BlackBoxFuncCall::Blake3 { .. } => BlackBoxFunc::Blake3, BlackBoxFuncCall::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, BlackBoxFuncCall::PedersenCommitment { .. } => BlackBoxFunc::PedersenCommitment, BlackBoxFuncCall::PedersenHash { .. } => BlackBoxFunc::PedersenHash, - BlackBoxFuncCall::HashToField128Security { .. } => BlackBoxFunc::HashToField128Security, BlackBoxFuncCall::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, BlackBoxFuncCall::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, BlackBoxFuncCall::FixedBaseScalarMul { .. } => BlackBoxFunc::FixedBaseScalarMul, + BlackBoxFuncCall::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, + BlackBoxFuncCall::EmbeddedCurveDouble { .. } => BlackBoxFunc::EmbeddedCurveDouble, BlackBoxFuncCall::Keccak256 { .. } => BlackBoxFunc::Keccak256, BlackBoxFuncCall::Keccak256VariableLength { .. } => BlackBoxFunc::Keccak256, + BlackBoxFuncCall::Keccakf1600 { .. } => BlackBoxFunc::Keccakf1600, BlackBoxFuncCall::RecursiveAggregation { .. } => BlackBoxFunc::RecursiveAggregation, } } @@ -148,14 +154,21 @@ impl BlackBoxFuncCall { match self { BlackBoxFuncCall::SHA256 { inputs, .. } | BlackBoxFuncCall::Blake2s { inputs, .. } + | BlackBoxFuncCall::Blake3 { inputs, .. } | BlackBoxFuncCall::Keccak256 { inputs, .. } + | BlackBoxFuncCall::Keccakf1600 { inputs, .. } | BlackBoxFuncCall::PedersenCommitment { inputs, .. } - | BlackBoxFuncCall::PedersenHash { inputs, .. } - | BlackBoxFuncCall::HashToField128Security { inputs, .. } => inputs.to_vec(), + | BlackBoxFuncCall::PedersenHash { inputs, .. } => inputs.to_vec(), BlackBoxFuncCall::AND { lhs, rhs, .. } | BlackBoxFuncCall::XOR { lhs, rhs, .. } => { vec![*lhs, *rhs] } BlackBoxFuncCall::FixedBaseScalarMul { low, high, .. 
} => vec![*low, *high], + BlackBoxFuncCall::EmbeddedCurveAdd { + input1_x, input1_y, input2_x, input2_y, .. + } => vec![*input1_x, *input1_y, *input2_x, *input2_y], + BlackBoxFuncCall::EmbeddedCurveDouble { input_x, input_y, .. } => { + vec![*input_x, *input_y] + } BlackBoxFuncCall::RANGE { input } => vec![*input], BlackBoxFuncCall::SchnorrVerify { public_key_x, @@ -219,16 +232,12 @@ impl BlackBoxFuncCall { proof, public_inputs, key_hash, - .. } => { let mut inputs = Vec::new(); inputs.extend(key.iter().copied()); inputs.extend(proof.iter().copied()); inputs.extend(public_inputs.iter().copied()); inputs.push(*key_hash); - // NOTE: we do not return an input aggregation object as it will either be non-existent for the first recursive aggregation - // or the output aggregation object of a previous recursive aggregation. We do not simulate recursive aggregation - // thus the input aggregation object will always be unassigned until proving inputs } } @@ -238,20 +247,22 @@ impl BlackBoxFuncCall { match self { BlackBoxFuncCall::SHA256 { outputs, .. } | BlackBoxFuncCall::Blake2s { outputs, .. } + | BlackBoxFuncCall::Blake3 { outputs, .. } | BlackBoxFuncCall::Keccak256 { outputs, .. } - | BlackBoxFuncCall::RecursiveAggregation { - output_aggregation_object: outputs, .. - } => outputs.to_vec(), + | BlackBoxFuncCall::Keccakf1600 { outputs, .. } => outputs.to_vec(), BlackBoxFuncCall::AND { output, .. } | BlackBoxFuncCall::XOR { output, .. } - | BlackBoxFuncCall::HashToField128Security { output, .. } | BlackBoxFuncCall::SchnorrVerify { output, .. } | BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | BlackBoxFuncCall::PedersenHash { output, .. } | BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } => vec![*output], BlackBoxFuncCall::FixedBaseScalarMul { outputs, .. } - | BlackBoxFuncCall::PedersenCommitment { outputs, .. } => vec![outputs.0, outputs.1], - BlackBoxFuncCall::RANGE { .. } => vec![], + | BlackBoxFuncCall::PedersenCommitment { outputs, .. 
} + | BlackBoxFuncCall::EmbeddedCurveAdd { outputs, .. } + | BlackBoxFuncCall::EmbeddedCurveDouble { outputs, .. } => vec![outputs.0, outputs.1], + BlackBoxFuncCall::RANGE { .. } | BlackBoxFuncCall::RecursiveAggregation { .. } => { + vec![] + } BlackBoxFuncCall::Keccak256VariableLength { outputs, .. } => outputs.to_vec(), } } diff --git a/acvm-repo/acir/src/lib.rs b/acvm-repo/acir/src/lib.rs index b7bcaa0c5c0..50de09c5ad9 100644 --- a/acvm-repo/acir/src/lib.rs +++ b/acvm-repo/acir/src/lib.rs @@ -32,7 +32,8 @@ mod reflection { }; use brillig::{ - BinaryFieldOp, BinaryIntOp, BlackBoxOp, Opcode as BrilligOpcode, RegisterOrMemory, + BinaryFieldOp, BinaryIntOp, BlackBoxOp, HeapValueType, Opcode as BrilligOpcode, + RegisterOrMemory, }; use serde_reflection::{Tracer, TracerConfig}; @@ -70,6 +71,7 @@ mod reflection { tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); tracer.trace_simple_type::().unwrap(); + tracer.trace_simple_type::().unwrap(); let registry = tracer.registry().unwrap(); diff --git a/acvm-repo/acir/src/native_types/expression/mod.rs b/acvm-repo/acir/src/native_types/expression/mod.rs index fe729720663..402aa3eb3a6 100644 --- a/acvm-repo/acir/src/native_types/expression/mod.rs +++ b/acvm-repo/acir/src/native_types/expression/mod.rs @@ -8,7 +8,7 @@ mod ordering; // In the addition polynomial // We can have arbitrary fan-in/out, so we need more than wL,wR and wO -// When looking at the arithmetic opcode for the quotient polynomial in standard plonk +// When looking at the assert-zero opcode for the quotient polynomial in standard plonk // You can think of it as fan-in 2 and fan out-1 , or you can think of it as fan-in 1 and fan-out 2 // // In the multiplication polynomial @@ -16,7 +16,7 @@ mod ordering; #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] pub struct Expression { // To avoid having to create intermediate variables pre-optimization - // We collect all of the multiplication terms in the arithmetic 
opcode + // We collect all of the multiplication terms in the assert-zero opcode // A multiplication term if of the form q_M * wL * wR // Hence this vector represents the following sum: q_M1 * wL1 * wR1 + q_M2 * wL2 * wR2 + .. + pub mul_terms: Vec<(FieldElement, Witness, Witness)>, @@ -42,7 +42,7 @@ impl std::fmt::Display for Expression { if let Some(witness) = self.to_witness() { write!(f, "x{}", witness.witness_index()) } else { - write!(f, "%{:?}%", crate::circuit::opcodes::Opcode::Arithmetic(self.clone())) + write!(f, "%{:?}%", crate::circuit::opcodes::Opcode::AssertZero(self.clone())) } } } @@ -178,7 +178,13 @@ impl Expression { self.linear_combinations.sort_by(|a, b| a.1.cmp(&b.1)); } - /// Checks if this polynomial can fit into one arithmetic identity + /// Checks if this expression can fit into one arithmetic identity + /// TODO: This needs to be reworded, arithmetic identity only makes sense in the context + /// TODO of PLONK, whereas we want expressions to be generic. + /// TODO: We just need to reword it to say exactly what its doing and + /// TODO then reference the fact that this is what plonk will accept. + /// TODO alternatively, we can define arithmetic identity in the context of expressions + /// TODO and then reference that. 
pub fn fits_in_one_identity(&self, width: usize) -> bool { // A Polynomial with more than one mul term cannot fit into one opcode if self.mul_terms.len() > 1 { diff --git a/acvm-repo/acir/src/native_types/expression/operators.rs b/acvm-repo/acir/src/native_types/expression/operators.rs index 35a548a2e3f..29cdc6967bb 100644 --- a/acvm-repo/acir/src/native_types/expression/operators.rs +++ b/acvm-repo/acir/src/native_types/expression/operators.rs @@ -230,7 +230,7 @@ fn single_mul(w: Witness, b: &Expression) -> Expression { } #[test] -fn add_smoketest() { +fn add_smoke_test() { let a = Expression { mul_terms: vec![], linear_combinations: vec![(FieldElement::from(2u128), Witness(2))], @@ -260,7 +260,7 @@ fn add_smoketest() { } #[test] -fn mul_smoketest() { +fn mul_smoke_test() { let a = Expression { mul_terms: vec![], linear_combinations: vec![(FieldElement::from(2u128), Witness(2))], diff --git a/acvm-repo/acir/tests/test_program_serialization.rs b/acvm-repo/acir/tests/test_program_serialization.rs index ff69ba34437..8df4f992fb2 100644 --- a/acvm-repo/acir/tests/test_program_serialization.rs +++ b/acvm-repo/acir/tests/test_program_serialization.rs @@ -20,11 +20,11 @@ use acir::{ native_types::{Expression, Witness}, }; use acir_field::FieldElement; -use brillig::{HeapArray, RegisterIndex, RegisterOrMemory}; +use brillig::{HeapArray, HeapValueType, RegisterIndex, RegisterOrMemory}; #[test] fn addition_circuit() { - let addition = Opcode::Arithmetic(Expression { + let addition = Opcode::AssertZero(Expression { mul_terms: Vec::new(), linear_combinations: vec![ (FieldElement::one(), Witness(1)), @@ -102,9 +102,9 @@ fn pedersen_circuit() { let bytes = Circuit::serialize_circuit(&circuit); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 250, 255, 139, - 163, 162, 130, 72, 16, 149, 241, 3, 135, 84, 164, 172, 173, 213, 175, 251, 45, 198, 96, - 243, 211, 50, 152, 67, 220, 211, 92, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 
0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 232, 255, 31, 142, + 138, 10, 34, 65, 84, 198, 15, 28, 82, 145, 178, 182, 86, 191, 238, 183, 24, 131, 205, 79, + 203, 0, 166, 242, 158, 93, 92, 0, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -145,7 +145,7 @@ fn schnorr_verify_circuit() { let expected_serialization: Vec = vec![ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 210, 87, 78, 2, 1, 20, 134, 209, 177, 247, 222, 123, 71, 68, 68, 68, 68, 68, 68, 68, 68, 68, 221, 133, 251, 95, 130, 145, 27, 206, 36, 78, 50, - 57, 16, 94, 200, 253, 191, 159, 36, 73, 134, 146, 193, 19, 142, 241, 183, 255, 14, 179, + 57, 16, 94, 200, 253, 191, 159, 36, 73, 134, 146, 193, 19, 142, 243, 183, 255, 14, 179, 233, 247, 145, 254, 59, 217, 127, 71, 57, 198, 113, 78, 48, 125, 167, 56, 205, 25, 206, 114, 142, 243, 92, 224, 34, 151, 184, 204, 21, 174, 114, 141, 235, 220, 224, 38, 183, 184, 205, 29, 238, 114, 143, 251, 60, 224, 33, 143, 120, 204, 19, 158, 242, 140, 25, 158, 51, @@ -158,7 +158,7 @@ fn schnorr_verify_circuit() { 91, 159, 218, 56, 99, 219, 172, 77, 115, 182, 204, 219, 176, 96, 187, 162, 205, 74, 182, 42, 219, 168, 98, 155, 170, 77, 106, 182, 168, 219, 160, 225, 246, 77, 55, 111, 185, 113, 219, 109, 59, 110, 218, 117, 203, 158, 27, 166, 55, 75, 239, 150, 184, 101, 250, 252, 1, - 19, 89, 159, 101, 220, 3, 0, 0, + 55, 204, 92, 74, 220, 3, 0, 0, ]; assert_eq!(bytes, expected_serialization) @@ -245,11 +245,19 @@ fn complex_brillig_foreign_call() { brillig::Opcode::ForeignCall { function: "complex".into(), inputs: vec![ - RegisterOrMemory::HeapArray(HeapArray { pointer: 0.into(), size: 3 }), + RegisterOrMemory::HeapArray(HeapArray { + pointer: 0.into(), + size: 3, + value_types: vec![HeapValueType::Simple], + }), RegisterOrMemory::RegisterIndex(RegisterIndex::from(1)), ], destinations: vec![ - RegisterOrMemory::HeapArray(HeapArray { pointer: 0.into(), size: 3 }), + RegisterOrMemory::HeapArray(HeapArray { + pointer: 0.into(), + size: 3, + value_types: vec![HeapValueType::Simple], 
+ }), RegisterOrMemory::RegisterIndex(RegisterIndex::from(1)), RegisterOrMemory::RegisterIndex(RegisterIndex::from(2)), ], @@ -269,13 +277,13 @@ fn complex_brillig_foreign_call() { let bytes = Circuit::serialize_circuit(&circuit); let expected_serialization: Vec = vec![ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 83, 219, 10, 128, 48, 8, 117, 174, 139, 159, 179, - 254, 160, 127, 137, 222, 138, 122, 236, 243, 19, 114, 32, 22, 244, 144, 131, 118, 64, 156, - 178, 29, 14, 59, 74, 0, 16, 224, 66, 228, 64, 57, 7, 169, 53, 242, 189, 81, 114, 250, 134, - 33, 248, 113, 165, 82, 26, 177, 2, 141, 177, 128, 198, 60, 15, 63, 245, 219, 211, 23, 215, - 255, 139, 15, 251, 211, 112, 180, 28, 157, 212, 189, 100, 82, 179, 64, 170, 63, 109, 235, - 190, 204, 135, 166, 178, 150, 216, 62, 154, 252, 250, 70, 147, 35, 220, 119, 93, 227, 4, - 182, 131, 81, 25, 36, 4, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 219, 10, 128, 48, 8, 117, 174, 203, 62, 103, + 253, 65, 255, 18, 189, 21, 245, 216, 231, 55, 200, 193, 193, 122, 137, 28, 180, 3, 226, 20, + 39, 135, 29, 103, 32, 34, 71, 23, 124, 50, 150, 179, 147, 24, 145, 235, 70, 241, 241, 27, + 6, 103, 215, 43, 150, 226, 200, 21, 112, 244, 5, 56, 230, 121, 248, 169, 222, 150, 186, + 152, 190, 159, 127, 248, 63, 77, 178, 54, 89, 39, 113, 47, 62, 192, 44, 4, 200, 79, 219, + 186, 47, 243, 129, 173, 180, 36, 152, 211, 49, 43, 255, 234, 62, 22, 48, 221, 119, 0, 226, + 4, 104, 45, 56, 241, 60, 4, 0, 0, ]; assert_eq!(bytes, expected_serialization) diff --git a/acvm-repo/acir_field/Cargo.toml b/acvm-repo/acir_field/Cargo.toml index 76030bc8863..dde121f4029 100644 --- a/acvm-repo/acir_field/Cargo.toml +++ b/acvm-repo/acir_field/Cargo.toml @@ -2,7 +2,7 @@ name = "acir_field" description = "The field implementation being used by ACIR." 
# x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/acir_field/src/generic_ark.rs b/acvm-repo/acir_field/src/generic_ark.rs index 3cf906724d3..dc54d271beb 100644 --- a/acvm-repo/acir_field/src/generic_ark.rs +++ b/acvm-repo/acir_field/src/generic_ark.rs @@ -2,6 +2,7 @@ use ark_ff::PrimeField; use ark_ff::Zero; use num_bigint::BigUint; use serde::{Deserialize, Serialize}; +use std::borrow::Cow; // XXX: Switch out for a trait and proper implementations // This implementation is in-efficient, can definitely remove hex usage and Iterator instances for trivial functionality @@ -125,8 +126,8 @@ impl<'de, T: ark_ff::PrimeField> Deserialize<'de> for FieldElement { where D: serde::Deserializer<'de>, { - let s = <&str>::deserialize(deserializer)?; - match Self::from_hex(s) { + let s: Cow<'de, str> = Deserialize::deserialize(deserializer)?; + match Self::from_hex(&s) { Some(value) => Ok(value), None => Err(serde::de::Error::custom(format!("Invalid hex for FieldElement: {s}",))), } diff --git a/acvm-repo/acvm/CHANGELOG.md b/acvm-repo/acvm/CHANGELOG.md index 29a4aa93adc..48d8317a80e 100644 --- a/acvm-repo/acvm/CHANGELOG.md +++ b/acvm-repo/acvm/CHANGELOG.md @@ -285,7 +285,7 @@ ### ⚠ BREAKING CHANGES * add backend-solvable blackboxes to brillig & unify implementations ([#422](https://github.com/noir-lang/acvm/issues/422)) -* **acvm:** Remove `CircuitSimplifer` ([#421](https://github.com/noir-lang/acvm/issues/421)) +* **acvm:** Remove `CircuitSimplifier` ([#421](https://github.com/noir-lang/acvm/issues/421)) * **acvm:** Add `circuit: &Circuit` to `eth_contract_from_vk` function signature ([#420](https://github.com/noir-lang/acvm/issues/420)) * Returns index of failing opcode and transformation mapping ([#412](https://github.com/noir-lang/acvm/issues/412)) @@ -299,7 +299,7 @@ ### Miscellaneous Chores -* **acvm:** Remove `CircuitSimplifer` 
([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) +* **acvm:** Remove `CircuitSimplifier` ([#421](https://github.com/noir-lang/acvm/issues/421)) ([e07a56d](https://github.com/noir-lang/acvm/commit/e07a56d9c542a7f03ce156761054cd403de0bd23)) ### Dependencies @@ -537,7 +537,7 @@ * require `Backend` to implement `Default` trait ([#223](https://github.com/noir-lang/acvm/issues/223)) * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) * return `PartialWitnessGeneratorStatus` from `PartialWitnessGenerator.solve` ([#213](https://github.com/noir-lang/acvm/issues/213)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ### Features @@ -557,7 +557,7 @@ ### Miscellaneous Chores * Make GeneralOptimizer crate visible ([#220](https://github.com/noir-lang/acvm/issues/220)) ([64bb346](https://github.com/noir-lang/acvm/commit/64bb346524428a0ce196826ea1e5ccde08ad6201)) -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) +* organize operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) ## [0.9.0](https://github.com/noir-lang/acvm/compare/acvm-v0.8.1...acvm-v0.9.0) (2023-04-07) @@ -641,7 +641,7 @@ * update `ProofSystemCompiler` to not take ownership of keys ([#111](https://github.com/noir-lang/acvm/issues/111)) * update `ProofSystemCompiler` methods to take `&Circuit` ([#108](https://github.com/noir-lang/acvm/issues/108)) * refactor ToRadix to ToRadixLe and ToRadixBe 
([#58](https://github.com/noir-lang/acvm/issues/58)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ### Features @@ -659,4 +659,4 @@ ### Miscellaneous Chores * refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) -* reorganise compiler in terms of optimisers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) +* reorganize compiler in terms of optimizers and transformers ([#88](https://github.com/noir-lang/acvm/issues/88)) ([9329307](https://github.com/noir-lang/acvm/commit/9329307e054de202cfc55207162ad952b70d515e)) diff --git a/acvm-repo/acvm/Cargo.toml b/acvm-repo/acvm/Cargo.toml index 6e4c2d322bb..a40148a01ef 100644 --- a/acvm-repo/acvm/Cargo.toml +++ b/acvm-repo/acvm/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm" description = "The virtual machine that processes ACIR given a backend/proof system." 
# x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -14,32 +14,27 @@ repository.workspace = true [dependencies] num-bigint.workspace = true -num-traits.workspace = true thiserror.workspace = true +tracing.workspace = true acir.workspace = true -stdlib.workspace = true brillig_vm.workspace = true acvm_blackbox_solver.workspace = true indexmap = "1.7.0" [features] -default = ["bn254", "testing"] +default = ["bn254"] bn254 = [ "acir/bn254", - "stdlib/bn254", "brillig_vm/bn254", "acvm_blackbox_solver/bn254", ] bls12_381 = [ "acir/bls12_381", - "stdlib/bls12_381", "brillig_vm/bls12_381", "acvm_blackbox_solver/bls12_381", ] -testing = ["stdlib/testing", "unstable-fallbacks"] -unstable-fallbacks = [] [dev-dependencies] rand = "0.8.5" diff --git a/acvm-repo/acvm/src/compiler/mod.rs b/acvm-repo/acvm/src/compiler/mod.rs index 4abf94a2e78..ccb043914d6 100644 --- a/acvm-repo/acvm/src/compiler/mod.rs +++ b/acvm-repo/acvm/src/compiler/mod.rs @@ -1,10 +1,8 @@ -use acir::{ - circuit::{opcodes::UnsupportedMemoryOpcode, Circuit, Opcode, OpcodeLocation}, - BlackBoxFunc, -}; -use thiserror::Error; +use std::collections::HashMap; -use crate::Language; +use acir::circuit::{Circuit, OpcodeLocation}; + +use crate::ExpressionWidth; // The various passes that we can use over ACIR mod optimizers; @@ -15,24 +13,26 @@ use optimizers::optimize_internal; pub use transformers::transform; use transformers::transform_internal; -#[derive(PartialEq, Eq, Debug, Error)] -pub enum CompileError { - #[error("The blackbox function {0} is not supported by the backend and acvm does not have a fallback implementation")] - UnsupportedBlackBox(BlackBoxFunc), - #[error("The opcode {0} is not supported by the backend and acvm does not have a fallback implementation")] - UnsupportedMemoryOpcode(UnsupportedMemoryOpcode), -} - /// This module moves and decomposes acir opcodes. 
The transformation map allows consumers of this module to map /// metadata they had about the opcodes to the new opcode structure generated after the transformation. #[derive(Debug)] pub struct AcirTransformationMap { - /// This is a vector of pointers to the old acir opcodes. The index of the vector is the new opcode index. - /// The value of the vector is the old opcode index pointed. - acir_opcode_positions: Vec, + /// Maps the old acir indices to the new acir indices + old_indices_to_new_indices: HashMap>, } impl AcirTransformationMap { + /// Builds a map from a vector of pointers to the old acir opcodes. + /// The index of the vector is the new opcode index. + /// The value of the vector is the old opcode index pointed. + fn new(acir_opcode_positions: Vec) -> Self { + let mut old_indices_to_new_indices = HashMap::with_capacity(acir_opcode_positions.len()); + for (new_index, old_index) in acir_opcode_positions.into_iter().enumerate() { + old_indices_to_new_indices.entry(old_index).or_insert_with(Vec::new).push(new_index); + } + AcirTransformationMap { old_indices_to_new_indices } + } + pub fn new_locations( &self, old_location: OpcodeLocation, @@ -42,16 +42,16 @@ impl AcirTransformationMap { OpcodeLocation::Brillig { acir_index, .. } => acir_index, }; - self.acir_opcode_positions - .iter() - .enumerate() - .filter(move |(_, &old_index)| old_index == old_acir_index) - .map(move |(new_index, _)| match old_location { - OpcodeLocation::Acir(_) => OpcodeLocation::Acir(new_index), - OpcodeLocation::Brillig { brillig_index, .. } => { - OpcodeLocation::Brillig { acir_index: new_index, brillig_index } - } - }) + self.old_indices_to_new_indices.get(&old_acir_index).into_iter().flat_map( + move |new_indices| { + new_indices.iter().map(move |new_index| match old_location { + OpcodeLocation::Acir(_) => OpcodeLocation::Acir(*new_index), + OpcodeLocation::Brillig { brillig_index, .. 
} => { + OpcodeLocation::Brillig { acir_index: *new_index, brillig_index } + } + }) + }, + ) } } @@ -71,15 +71,16 @@ fn transform_assert_messages( /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. pub fn compile( acir: Circuit, - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool, -) -> Result<(Circuit, AcirTransformationMap), CompileError> { - let (acir, AcirTransformationMap { acir_opcode_positions }) = optimize_internal(acir); + expression_width: ExpressionWidth, +) -> (Circuit, AcirTransformationMap) { + let (acir, acir_opcode_positions) = optimize_internal(acir); + + let (mut acir, acir_opcode_positions) = + transform_internal(acir, expression_width, acir_opcode_positions); - let (mut acir, transformation_map) = - transform_internal(acir, np_language, is_opcode_supported, acir_opcode_positions)?; + let transformation_map = AcirTransformationMap::new(acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); - Ok((acir, transformation_map)) + (acir, transformation_map) } diff --git a/acvm-repo/acvm/src/compiler/optimizers/mod.rs b/acvm-repo/acvm/src/compiler/optimizers/mod.rs index 627ddbb4117..923756580b3 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/mod.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/mod.rs @@ -6,6 +6,7 @@ mod unused_memory; pub(crate) use general::GeneralOptimizer; pub(crate) use redundant_range::RangeOptimizer; +use tracing::info; use self::unused_memory::UnusedMemoryOptimizer; @@ -13,7 +14,9 @@ use super::{transform_assert_messages, AcirTransformationMap}; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. 
pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { - let (mut acir, transformation_map) = optimize_internal(acir); + let (mut acir, new_opcode_positions) = optimize_internal(acir); + + let transformation_map = AcirTransformationMap::new(new_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); @@ -21,14 +24,17 @@ pub fn optimize(acir: Circuit) -> (Circuit, AcirTransformationMap) { } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] independent optimizations to a [`Circuit`]. -pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, AcirTransformationMap) { +#[tracing::instrument(level = "trace", name = "optimize_acir" skip(acir))] +pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, Vec) { + info!("Number of opcodes before: {}", acir.opcodes.len()); + // General optimizer pass let opcodes: Vec = acir .opcodes .into_iter() .map(|opcode| { - if let Opcode::Arithmetic(arith_expr) = opcode { - Opcode::Arithmetic(GeneralOptimizer::optimize(arith_expr)) + if let Opcode::AssertZero(arith_expr) = opcode { + Opcode::AssertZero(GeneralOptimizer::optimize(arith_expr)) } else { opcode } @@ -50,7 +56,7 @@ pub(super) fn optimize_internal(acir: Circuit) -> (Circuit, AcirTransformationMa let (acir, acir_opcode_positions) = range_optimizer.replace_redundant_ranges(acir_opcode_positions); - let transformation_map = AcirTransformationMap { acir_opcode_positions }; + info!("Number of opcodes after: {}", acir.opcodes.len()); - (acir, transformation_map) + (acir, acir_opcode_positions) } diff --git a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs index 766d3674113..5d19f9629ba 100644 --- a/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs +++ b/acvm-repo/acvm/src/compiler/optimizers/redundant_range.rs @@ -48,25 +48,51 @@ impl RangeOptimizer { /// only store the fact that we have constrained it to /// be 16 
bits. fn collect_ranges(circuit: &Circuit) -> BTreeMap { - let mut witness_to_bit_sizes = BTreeMap::new(); + let mut witness_to_bit_sizes: BTreeMap = BTreeMap::new(); for opcode in &circuit.opcodes { - // Extract the witness index and number of bits, - // if it is a range constraint - let (witness, num_bits) = match extract_range_opcode(opcode) { - Some(func_inputs) => func_inputs, - None => continue, + let Some((witness, num_bits)) = (match opcode { + Opcode::AssertZero(expr) => { + // If the opcode is constraining a witness to be equal to a value then it can be considered + // as a range opcode for the number of bits required to hold that value. + if expr.is_degree_one_univariate() { + let (k, witness) = expr.linear_combinations[0]; + let constant = expr.q_c; + let witness_value = -constant / k; + + if witness_value.is_zero() { + Some((witness, 0)) + } else { + // We subtract off 1 bit from the implied witness value to give the weakest range constraint + // which would be stricter than the constraint imposed by this opcode. 
+ let implied_range_constraint_bits = witness_value.num_bits() - 1; + Some((witness, implied_range_constraint_bits)) + } + } else { + None + } + } + + + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { + input: FunctionInput { witness, num_bits }, + }) => { + Some((*witness, *num_bits)) + } + + _ => None, + }) else { + continue; }; // Check if the witness has already been recorded and if the witness // size is more than the current one, we replace it - let should_replace = match witness_to_bit_sizes.get(&witness).copied() { - Some(old_range_bits) => old_range_bits > num_bits, - None => true, - }; - if should_replace { - witness_to_bit_sizes.insert(witness, num_bits); - } + witness_to_bit_sizes + .entry(witness) + .and_modify(|old_range_bits| { + *old_range_bits = std::cmp::min(*old_range_bits, num_bits); + }) + .or_insert(num_bits); } witness_to_bit_sizes } @@ -116,23 +142,17 @@ impl RangeOptimizer { /// Extract the range opcode from the `Opcode` enum /// Returns None, if `Opcode` is not the range opcode. 
fn extract_range_opcode(opcode: &Opcode) -> Option<(Witness, u32)> { - // Range constraints are blackbox function calls - // so we first extract the function call - let func_call = match opcode { - acir::circuit::Opcode::BlackBoxFuncCall(func_call) => func_call, - _ => return None, - }; - - // Skip if it is not a range constraint - match func_call { - BlackBoxFuncCall::RANGE { input } => Some((input.witness, input.num_bits)), + match opcode { + Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { input }) => { + Some((input.witness, input.num_bits)) + } _ => None, } } fn optimized_range_opcode(witness: Witness, num_bits: u32) -> Opcode { if num_bits == 1 { - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(FieldElement::one(), witness, witness)], linear_combinations: vec![(-FieldElement::one(), witness)], q_c: FieldElement::zero(), @@ -234,16 +254,29 @@ mod tests { #[test] fn non_range_opcodes() { // The optimizer should not remove or change non-range opcodes - // The four Arithmetic opcodes should remain unchanged. + // The four AssertZero opcodes should remain unchanged. 
let mut circuit = test_circuit(vec![(Witness(1), 16), (Witness(1), 16)]); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); - circuit.opcodes.push(Opcode::Arithmetic(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); + circuit.opcodes.push(Opcode::AssertZero(Expression::default())); let acir_opcode_positions = circuit.opcodes.iter().enumerate().map(|(i, _)| i).collect(); let optimizer = RangeOptimizer::new(circuit); let (optimized_circuit, _) = optimizer.replace_redundant_ranges(acir_opcode_positions); assert_eq!(optimized_circuit.opcodes.len(), 5); } + + #[test] + fn constant_implied_ranges() { + // The optimizer should use knowledge about constant witness assignments to remove range opcodes. + let mut circuit = test_circuit(vec![(Witness(1), 16)]); + + circuit.opcodes.push(Opcode::AssertZero(Witness(1).into())); + let acir_opcode_positions = circuit.opcodes.iter().enumerate().map(|(i, _)| i).collect(); + let optimizer = RangeOptimizer::new(circuit); + let (optimized_circuit, _) = optimizer.replace_redundant_ranges(acir_opcode_positions); + assert_eq!(optimized_circuit.opcodes.len(), 1); + assert_eq!(optimized_circuit.opcodes[0], Opcode::AssertZero(Witness(1).into())); + } } diff --git a/acvm-repo/acvm/src/compiler/transformers/csat.rs b/acvm-repo/acvm/src/compiler/transformers/csat.rs index 9f89ac4671a..9e2e3091c74 100644 --- a/acvm-repo/acvm/src/compiler/transformers/csat.rs +++ b/acvm-repo/acvm/src/compiler/transformers/csat.rs @@ -9,7 +9,7 @@ use indexmap::IndexMap; /// A transformer which processes any [`Expression`]s to break them up such that they /// fit within the [`ProofSystemCompiler`][crate::ProofSystemCompiler]'s width. 
/// -/// This transformer is only used when targetting the [`PLONKCSat`][crate::Language::PLONKCSat] language. +/// This transformer is only used when targeting the [`Bounded`][crate::ExpressionWidth::Bounded] configuration. /// /// This is done by creating intermediate variables to hold partial calculations and then combining them /// to calculate the original expression. @@ -62,7 +62,7 @@ impl CSatTransformer { } // Still missing dead witness optimization. - // To do this, we will need the whole set of arithmetic opcodes + // To do this, we will need the whole set of assert-zero opcodes // I think it can also be done before the local optimization seen here, as dead variables will come from the user pub(crate) fn transform( &mut self, @@ -84,7 +84,7 @@ impl CSatTransformer { opcode } - // This optimization will search for combinations of terms which can be represented in a single arithmetic opcode + // This optimization will search for combinations of terms which can be represented in a single assert-zero opcode // Case 1 : qM * wL * wR + qL * wL + qR * wR + qO * wO + qC // This polynomial does not require any further optimizations, it can be safely represented in one opcode // ie a polynomial with 1 mul(bi-variate) term and 3 (univariate) terms where 2 of those terms match the bivariate term @@ -93,13 +93,13 @@ impl CSatTransformer { // // // Case 2: qM * wL * wR + qL * wL + qR * wR + qO * wO + qC + qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 - // This polynomial cannot be represented using one arithmetic opcode. + // This polynomial cannot be represented using one assert-zero opcode. // // This algorithm will first extract the first full opcode(if possible): // t = qM * wL * wR + qL * wL + qR * wR + qO * wO + qC // // The polynomial now looks like so t + qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 - // This polynomial cannot be represented using one arithmetic opcode. + // This polynomial cannot be represented using one assert-zero opcode. 
// // This algorithm will then extract the second full opcode(if possible): // t2 = qM2 * wL2 * wR2 + qL * wL2 + qR * wR2 + qO * wO2 + qC2 @@ -121,7 +121,7 @@ impl CSatTransformer { // If the opcode only has one mul term, then this algorithm cannot optimize it any further // Either it can be represented in a single arithmetic equation or it's fan-in is too large and we need intermediate variables for those // large-fan-in optimization is not this algorithms purpose. - // If the opcode has 0 mul terms, then it is an add opcode and similarly it can either fit into a single arithmetic opcode or it has a large fan-in + // If the opcode has 0 mul terms, then it is an add opcode and similarly it can either fit into a single assert-zero opcode or it has a large fan-in if opcode.mul_terms.len() <= 1 { return opcode; } @@ -194,7 +194,7 @@ impl CSatTransformer { } } - // Now we have used up 2 spaces in our arithmetic opcode. The width now dictates, how many more we can add + // Now we have used up 2 spaces in our assert-zero opcode. 
The width now dictates, how many more we can add let mut remaining_space = self.width - 2 - 1; // We minus 1 because we need an extra space to contain the intermediate variable // Keep adding terms until we have no more left, or we reach the width let mut remaining_linear_terms = @@ -325,7 +325,7 @@ impl CSatTransformer { // Then use intermediate variables again to squash the fan-in, so that it can fit into the appropriate width // First check if this polynomial actually needs a partial opcode optimization - // There is the chance that it fits perfectly within the arithmetic opcode + // There is the chance that it fits perfectly within the assert-zero opcode if opcode.fits_in_one_identity(self.width) { return opcode; } diff --git a/acvm-repo/acvm/src/compiler/transformers/fallback.rs b/acvm-repo/acvm/src/compiler/transformers/fallback.rs deleted file mode 100644 index 06dfc84a798..00000000000 --- a/acvm-repo/acvm/src/compiler/transformers/fallback.rs +++ /dev/null @@ -1,158 +0,0 @@ -use super::super::CompileError; -use acir::{ - circuit::{opcodes::BlackBoxFuncCall, Circuit, Opcode}, - native_types::Expression, -}; - -/// The initial transformer to act on a [`Circuit`]. This replaces any unsupported opcodes with -/// fallback implementations consisting of well supported opcodes. 
-pub(crate) struct FallbackTransformer; - -impl FallbackTransformer { - //ACIR pass which replace unsupported opcodes using arithmetic fallback - pub(crate) fn transform( - acir: Circuit, - is_supported: impl Fn(&Opcode) -> bool, - opcode_positions: Vec, - ) -> Result<(Circuit, Vec), CompileError> { - let mut acir_supported_opcodes = Vec::with_capacity(acir.opcodes.len()); - let mut new_opcode_positions = Vec::with_capacity(opcode_positions.len()); - let mut witness_idx = acir.current_witness_index + 1; - - for (idx, opcode) in acir.opcodes.into_iter().enumerate() { - match &opcode { - Opcode::Arithmetic(_) | Opcode::Directive(_) | Opcode::Brillig(_) => { - // directive, arithmetic expression or blocks are handled by acvm - new_opcode_positions.push(opcode_positions[idx]); - acir_supported_opcodes.push(opcode); - continue; - } - Opcode::MemoryInit { .. } | Opcode::MemoryOp { .. } => { - if !is_supported(&opcode) { - return Err(CompileError::UnsupportedMemoryOpcode( - opcode.unsupported_opcode(), - )); - } - new_opcode_positions.push(opcode_positions[idx]); - acir_supported_opcodes.push(opcode); - } - Opcode::BlackBoxFuncCall(bb_func_call) => { - // We know it is an black box function. Now check if it is - // supported by the backend. 
If it is supported, then we can simply - // collect the opcode - if is_supported(&opcode) { - new_opcode_positions.push(opcode_positions[idx]); - acir_supported_opcodes.push(opcode); - continue; - } else { - // If we get here then we know that this black box function is not supported - // so we need to replace it with a version of the opcode which only uses arithmetic - // expressions - let (updated_witness_index, opcodes_fallback) = - Self::opcode_fallback(bb_func_call, witness_idx)?; - witness_idx = updated_witness_index; - new_opcode_positions - .extend(vec![opcode_positions[idx]; opcodes_fallback.len()]); - acir_supported_opcodes.extend(opcodes_fallback); - } - } - } - } - - Ok(( - Circuit { - current_witness_index: witness_idx - 1, - opcodes: acir_supported_opcodes, - ..acir - }, - new_opcode_positions, - )) - } - - fn opcode_fallback( - gc: &BlackBoxFuncCall, - current_witness_idx: u32, - ) -> Result<(u32, Vec), CompileError> { - let (updated_witness_index, opcodes_fallback) = match gc { - BlackBoxFuncCall::AND { lhs, rhs, output } => { - assert_eq!( - lhs.num_bits, rhs.num_bits, - "number of bits specified for each input must be the same" - ); - stdlib::blackbox_fallbacks::and( - Expression::from(lhs.witness), - Expression::from(rhs.witness), - *output, - lhs.num_bits, - current_witness_idx, - ) - } - BlackBoxFuncCall::XOR { lhs, rhs, output } => { - assert_eq!( - lhs.num_bits, rhs.num_bits, - "number of bits specified for each input must be the same" - ); - stdlib::blackbox_fallbacks::xor( - Expression::from(lhs.witness), - Expression::from(rhs.witness), - *output, - lhs.num_bits, - current_witness_idx, - ) - } - BlackBoxFuncCall::RANGE { input } => { - // Note there are no outputs because range produces no outputs - stdlib::blackbox_fallbacks::range( - Expression::from(input.witness), - input.num_bits, - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::SHA256 { inputs, outputs } => { - let sha256_inputs = - 
inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::sha256( - sha256_inputs, - outputs.to_vec(), - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::Blake2s { inputs, outputs } => { - let blake2s_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::blake2s( - blake2s_inputs, - outputs.to_vec(), - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::HashToField128Security { inputs, output } => { - let hash_to_field_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::hash_to_field( - hash_to_field_inputs, - *output, - current_witness_idx, - ) - } - #[cfg(feature = "unstable-fallbacks")] - BlackBoxFuncCall::Keccak256 { inputs, outputs } => { - let keccak_inputs = - inputs.iter().map(|input| (input.witness.into(), input.num_bits)).collect(); - stdlib::blackbox_fallbacks::keccak256( - keccak_inputs, - outputs.to_vec(), - current_witness_idx, - ) - } - _ => { - return Err(CompileError::UnsupportedBlackBox(gc.get_black_box_func())); - } - }; - - Ok((updated_witness_index, opcodes_fallback)) - } -} diff --git a/acvm-repo/acvm/src/compiler/transformers/mod.rs b/acvm-repo/acvm/src/compiler/transformers/mod.rs index d827b759666..306ea1b7c12 100644 --- a/acvm-repo/acvm/src/compiler/transformers/mod.rs +++ b/acvm-repo/acvm/src/compiler/transformers/mod.rs @@ -5,56 +5,50 @@ use acir::{ }; use indexmap::IndexMap; -use crate::Language; +use crate::ExpressionWidth; mod csat; -mod fallback; mod r1cs; pub(crate) use csat::CSatTransformer; -pub(crate) use fallback::FallbackTransformer; pub(crate) use r1cs::R1CSTransformer; -use super::{transform_assert_messages, AcirTransformationMap, CompileError}; +use super::{transform_assert_messages, AcirTransformationMap}; /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] 
specific optimizations to a [`Circuit`]. pub fn transform( acir: Circuit, - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool, -) -> Result<(Circuit, AcirTransformationMap), CompileError> { + expression_width: ExpressionWidth, +) -> (Circuit, AcirTransformationMap) { // Track original acir opcode positions throughout the transformation passes of the compilation // by applying the modifications done to the circuit opcodes and also to the opcode_positions (delete and insert) let acir_opcode_positions = acir.opcodes.iter().enumerate().map(|(i, _)| i).collect(); - let (mut acir, transformation_map) = - transform_internal(acir, np_language, is_opcode_supported, acir_opcode_positions)?; + let (mut acir, acir_opcode_positions) = + transform_internal(acir, expression_width, acir_opcode_positions); + + let transformation_map = AcirTransformationMap::new(acir_opcode_positions); acir.assert_messages = transform_assert_messages(acir.assert_messages, &transformation_map); - Ok((acir, transformation_map)) + (acir, transformation_map) } /// Applies [`ProofSystemCompiler`][crate::ProofSystemCompiler] specific optimizations to a [`Circuit`]. /// /// Accepts an injected `acir_opcode_positions` to allow transformations to be applied directly after optimizations. 
+#[tracing::instrument(level = "trace", name = "transform_acir", skip(acir, acir_opcode_positions))] pub(super) fn transform_internal( acir: Circuit, - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool, + expression_width: ExpressionWidth, acir_opcode_positions: Vec, -) -> Result<(Circuit, AcirTransformationMap), CompileError> { - // Fallback transformer pass - let (acir, acir_opcode_positions) = - FallbackTransformer::transform(acir, is_opcode_supported, acir_opcode_positions)?; - - let mut transformer = match &np_language { - crate::Language::R1CS => { - let transformation_map = AcirTransformationMap { acir_opcode_positions }; +) -> (Circuit, Vec) { + let mut transformer = match &expression_width { + crate::ExpressionWidth::Unbounded => { let transformer = R1CSTransformer::new(acir); - return Ok((transformer.transform(), transformation_map)); + return (transformer.transform(), acir_opcode_positions); } - crate::Language::PLONKCSat { width } => { + crate::ExpressionWidth::Bounded { width } => { let mut csat = CSatTransformer::new(*width); for value in acir.circuit_arguments() { csat.mark_solvable(value); @@ -68,7 +62,7 @@ pub(super) fn transform_internal( // TODO or at the very least, we could put all of it inside of CSatOptimizer pass let mut new_acir_opcode_positions: Vec = Vec::with_capacity(acir_opcode_positions.len()); - // Optimize the arithmetic gates by reducing them into the correct width and + // Optimize the assert-zero gates by reducing them into the correct width and // creating intermediate variables when necessary let mut transformed_opcodes = Vec::new(); @@ -78,7 +72,7 @@ pub(super) fn transform_internal( let mut intermediate_variables: IndexMap = IndexMap::new(); for (index, opcode) in acir.opcodes.into_iter().enumerate() { match opcode { - Opcode::Arithmetic(arith_expr) => { + Opcode::AssertZero(arith_expr) => { let len = intermediate_variables.len(); let arith_expr = transformer.transform( @@ -101,7 +95,7 @@ pub(super) fn 
transform_internal( new_opcodes.push(arith_expr); for opcode in new_opcodes { new_acir_opcode_positions.push(acir_opcode_positions[index]); - transformed_opcodes.push(Opcode::Arithmetic(opcode)); + transformed_opcodes.push(Opcode::AssertZero(opcode)); } } Opcode::BlackBoxFuncCall(ref func) => { @@ -110,18 +104,17 @@ pub(super) fn transform_internal( | acir::circuit::opcodes::BlackBoxFuncCall::XOR { output, .. } => { transformer.mark_solvable(*output); } - acir::circuit::opcodes::BlackBoxFuncCall::RANGE { .. } => (), + acir::circuit::opcodes::BlackBoxFuncCall::RANGE { .. } + | acir::circuit::opcodes::BlackBoxFuncCall::RecursiveAggregation { .. } => (), acir::circuit::opcodes::BlackBoxFuncCall::SHA256 { outputs, .. } | acir::circuit::opcodes::BlackBoxFuncCall::Keccak256 { outputs, .. } | acir::circuit::opcodes::BlackBoxFuncCall::Keccak256VariableLength { outputs, .. } - | acir::circuit::opcodes::BlackBoxFuncCall::RecursiveAggregation { - output_aggregation_object: outputs, - .. - } - | acir::circuit::opcodes::BlackBoxFuncCall::Blake2s { outputs, .. } => { + | acir::circuit::opcodes::BlackBoxFuncCall::Keccakf1600 { outputs, .. } + | acir::circuit::opcodes::BlackBoxFuncCall::Blake2s { outputs, .. } + | acir::circuit::opcodes::BlackBoxFuncCall::Blake3 { outputs, .. } => { for witness in outputs { transformer.mark_solvable(*witness); } @@ -130,6 +123,13 @@ pub(super) fn transform_internal( outputs, .. } + | acir::circuit::opcodes::BlackBoxFuncCall::EmbeddedCurveAdd { + outputs, .. + } + | acir::circuit::opcodes::BlackBoxFuncCall::EmbeddedCurveDouble { + outputs, + .. + } | acir::circuit::opcodes::BlackBoxFuncCall::PedersenCommitment { outputs, .. @@ -137,11 +137,7 @@ pub(super) fn transform_internal( transformer.mark_solvable(outputs.0); transformer.mark_solvable(outputs.1); } - acir::circuit::opcodes::BlackBoxFuncCall::HashToField128Security { - output, - .. - } - | acir::circuit::opcodes::BlackBoxFuncCall::EcdsaSecp256k1 { output, .. 
} + acir::circuit::opcodes::BlackBoxFuncCall::EcdsaSecp256k1 { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::EcdsaSecp256r1 { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::SchnorrVerify { output, .. } | acir::circuit::opcodes::BlackBoxFuncCall::PedersenHash { output, .. } => { @@ -154,10 +150,6 @@ pub(super) fn transform_internal( } Opcode::Directive(ref directive) => { match directive { - Directive::Quotient(quotient_directive) => { - transformer.mark_solvable(quotient_directive.q); - transformer.mark_solvable(quotient_directive.r); - } Directive::ToLeRadix { b, .. } => { for witness in b { transformer.mark_solvable(*witness); @@ -214,8 +206,5 @@ pub(super) fn transform_internal( ..acir }; - let transformation_map = - AcirTransformationMap { acir_opcode_positions: new_acir_opcode_positions }; - - Ok((acir, transformation_map)) + (acir, new_acir_opcode_positions) } diff --git a/acvm-repo/acvm/src/lib.rs b/acvm-repo/acvm/src/lib.rs index 0ab037a2e4b..626bb2c9b91 100644 --- a/acvm-repo/acvm/src/lib.rs +++ b/acvm-repo/acvm/src/lib.rs @@ -18,10 +18,16 @@ pub use brillig_vm; // re-export blackbox solver pub use acvm_blackbox_solver as blackbox_solver; -/// Supported NP complete languages -/// This might need to be in ACIR instead +/// Specifies the maximum width of the expressions which will be constrained. +/// +/// Unbounded Expressions are useful if you are eventually going to pass the ACIR +/// into a proving system which supports R1CS. +/// +/// Bounded Expressions are useful if you are eventually going to pass the ACIR +/// into a proving system which supports PLONK, where arithmetic expressions have a +/// finite fan-in. 
#[derive(Debug, Clone, Copy)] -pub enum Language { - R1CS, - PLONKCSat { width: usize }, +pub enum ExpressionWidth { + Unbounded, + Bounded { width: usize }, } diff --git a/acvm-repo/acvm/src/pwg/arithmetic.rs b/acvm-repo/acvm/src/pwg/arithmetic.rs index 93a39fb249c..81462ea495e 100644 --- a/acvm-repo/acvm/src/pwg/arithmetic.rs +++ b/acvm-repo/acvm/src/pwg/arithmetic.rs @@ -5,9 +5,9 @@ use acir::{ use super::{insert_value, ErrorLocation, OpcodeNotSolvable, OpcodeResolutionError}; -/// An Arithmetic solver will take a Circuit's arithmetic opcodes with witness assignments +/// An Expression solver will take a Circuit's assert-zero opcodes with witness assignments /// and create the other witness variables -pub(super) struct ArithmeticSolver; +pub(super) struct ExpressionSolver; #[allow(clippy::enum_variant_names)] pub(super) enum OpcodeStatus { @@ -22,17 +22,17 @@ pub(crate) enum MulTerm { Solved(FieldElement), } -impl ArithmeticSolver { +impl ExpressionSolver { /// Derives the rest of the witness based on the initial low level variables pub(super) fn solve( initial_witness: &mut WitnessMap, opcode: &Expression, ) -> Result<(), OpcodeResolutionError> { - let opcode = &ArithmeticSolver::evaluate(opcode, initial_witness); + let opcode = &ExpressionSolver::evaluate(opcode, initial_witness); // Evaluate multiplication term - let mul_result = ArithmeticSolver::solve_mul_term(opcode, initial_witness); + let mul_result = ExpressionSolver::solve_mul_term(opcode, initial_witness); // Evaluate the fan-in terms - let opcode_status = ArithmeticSolver::solve_fan_in_term(opcode, initial_witness); + let opcode_status = ExpressionSolver::solve_fan_in_term(opcode, initial_witness); match (mul_result, opcode_status) { (MulTerm::TooManyUnknowns, _) | (_, OpcodeStatus::OpcodeUnsolvable) => { @@ -126,7 +126,7 @@ impl ArithmeticSolver { } } - /// Returns the evaluation of the multiplication term in the arithmetic opcode + /// Returns the evaluation of the multiplication term in the 
expression /// If the witness values are not known, then the function returns a None /// XXX: Do we need to account for the case where 5xy + 6x = 0 ? We do not know y, but it can be solved given x . But I believe x can be solved with another opcode /// XXX: What about making a mul opcode = a constant 5xy + 7 = 0 ? This is the same as the above. @@ -135,11 +135,11 @@ impl ArithmeticSolver { // We are assuming it has been optimized. match arith_opcode.mul_terms.len() { 0 => MulTerm::Solved(FieldElement::zero()), - 1 => ArithmeticSolver::solve_mul_term_helper( + 1 => ExpressionSolver::solve_mul_term_helper( &arith_opcode.mul_terms[0], witness_assignments, ), - _ => panic!("Mul term in the arithmetic opcode must contain either zero or one term"), + _ => panic!("Mul term in the assert-zero opcode must contain either zero or one term"), } } @@ -186,7 +186,7 @@ impl ArithmeticSolver { let mut result = FieldElement::zero(); for term in arith_opcode.linear_combinations.iter() { - let value = ArithmeticSolver::solve_fan_in_term_helper(term, witness_assignments); + let value = ExpressionSolver::solve_fan_in_term_helper(term, witness_assignments); match value { Some(a) => result += a, None => { @@ -212,7 +212,7 @@ impl ArithmeticSolver { pub(super) fn evaluate(expr: &Expression, initial_witness: &WitnessMap) -> Expression { let mut result = Expression::default(); for &(c, w1, w2) in &expr.mul_terms { - let mul_result = ArithmeticSolver::solve_mul_term_helper(&(c, w1, w2), initial_witness); + let mul_result = ExpressionSolver::solve_mul_term_helper(&(c, w1, w2), initial_witness); match mul_result { MulTerm::OneUnknown(v, w) => { if !v.is_zero() { @@ -228,7 +228,7 @@ impl ArithmeticSolver { } } for &(c, w) in &expr.linear_combinations { - if let Some(f) = ArithmeticSolver::solve_fan_in_term_helper(&(c, w), initial_witness) { + if let Some(f) = ExpressionSolver::solve_fan_in_term_helper(&(c, w), initial_witness) { result.q_c += f; } else if !c.is_zero() { 
result.linear_combinations.push((c, w)); @@ -240,7 +240,7 @@ impl ArithmeticSolver { } #[test] -fn arithmetic_smoke_test() { +fn expression_solver_smoke_test() { let a = Witness(0); let b = Witness(1); let c = Witness(2); @@ -274,8 +274,8 @@ fn arithmetic_smoke_test() { values.insert(c, FieldElement::from(1_i128)); values.insert(d, FieldElement::from(1_i128)); - assert_eq!(ArithmeticSolver::solve(&mut values, &opcode_a), Ok(())); - assert_eq!(ArithmeticSolver::solve(&mut values, &opcode_b), Ok(())); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_a), Ok(())); + assert_eq!(ExpressionSolver::solve(&mut values, &opcode_b), Ok(())); assert_eq!(values.get(&a).unwrap(), &FieldElement::from(4_i128)); } diff --git a/acvm-repo/acvm/src/pwg/blackbox/hash.rs b/acvm-repo/acvm/src/pwg/blackbox/hash.rs index 80665a743c4..1ada397fc59 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/hash.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/hash.rs @@ -3,26 +3,11 @@ use acir::{ native_types::{Witness, WitnessMap}, BlackBoxFunc, FieldElement, }; -use acvm_blackbox_solver::{hash_to_field_128_security, BlackBoxResolutionError}; +use acvm_blackbox_solver::BlackBoxResolutionError; use crate::pwg::{insert_value, witness_to_value}; use crate::OpcodeResolutionError; -/// Attempts to solve a `HashToField128Security` opcode -/// If successful, `initial_witness` will be mutated to contain the new witness assignment. -pub(super) fn solve_hash_to_field( - initial_witness: &mut WitnessMap, - inputs: &[FunctionInput], - output: &Witness, -) -> Result<(), OpcodeResolutionError> { - let message_input = get_hash_input(initial_witness, inputs, None)?; - let field = hash_to_field_128_security(&message_input)?; - - insert_value(output, field, initial_witness)?; - - Ok(()) -} - /// Attempts to solve a 256 bit hash function opcode. /// If successful, `initial_witness` will be mutated to contain the new witness assignment. 
pub(super) fn solve_generic_256_hash_opcode( diff --git a/acvm-repo/acvm/src/pwg/blackbox/mod.rs b/acvm-repo/acvm/src/pwg/blackbox/mod.rs index 7e8ab8b948c..5eea234885c 100644 --- a/acvm-repo/acvm/src/pwg/blackbox/mod.rs +++ b/acvm-repo/acvm/src/pwg/blackbox/mod.rs @@ -3,12 +3,12 @@ use acir::{ native_types::{Witness, WitnessMap}, FieldElement, }; -use acvm_blackbox_solver::{blake2s, keccak256, sha256}; +use acvm_blackbox_solver::{blake2s, blake3, keccak256, keccakf1600, sha256}; use self::pedersen::pedersen_hash; use super::{insert_value, OpcodeNotSolvable, OpcodeResolutionError}; -use crate::BlackBoxFunctionSolver; +use crate::{pwg::witness_to_value, BlackBoxFunctionSolver}; mod fixed_base_scalar_mul; mod hash; @@ -19,7 +19,7 @@ mod signature; use fixed_base_scalar_mul::fixed_base_scalar_mul; // Hash functions should eventually be exposed for external consumers. -use hash::{solve_generic_256_hash_opcode, solve_hash_to_field}; +use hash::solve_generic_256_hash_opcode; use logic::{and, xor}; use pedersen::pedersen; use range::solve_range_opcode; @@ -83,6 +83,14 @@ pub(crate) fn solve( blake2s, bb_func.get_black_box_func(), ), + BlackBoxFuncCall::Blake3 { inputs, outputs } => solve_generic_256_hash_opcode( + initial_witness, + inputs, + None, + outputs, + blake3, + bb_func.get_black_box_func(), + ), BlackBoxFuncCall::Keccak256 { inputs, outputs } => solve_generic_256_hash_opcode( initial_witness, inputs, @@ -101,8 +109,21 @@ pub(crate) fn solve( bb_func.get_black_box_func(), ) } - BlackBoxFuncCall::HashToField128Security { inputs, output } => { - solve_hash_to_field(initial_witness, inputs, output) + BlackBoxFuncCall::Keccakf1600 { inputs, outputs } => { + let mut state = [0; 25]; + for (i, input) in inputs.iter().enumerate() { + let witness = input.witness; + let num_bits = input.num_bits as usize; + assert_eq!(num_bits, 64); + let witness_assignment = witness_to_value(initial_witness, witness)?; + let lane = witness_assignment.try_to_u64(); + state[i] = 
lane.unwrap(); + } + let output_state = keccakf1600(state)?; + for (output_witness, value) in outputs.iter().zip(output_state.into_iter()) { + insert_value(output_witness, FieldElement::from(value as u128), initial_witness)?; + } + Ok(()) } BlackBoxFuncCall::SchnorrVerify { public_key_x, @@ -156,13 +177,13 @@ pub(crate) fn solve( BlackBoxFuncCall::FixedBaseScalarMul { low, high, outputs } => { fixed_base_scalar_mul(backend, initial_witness, *low, *high, *outputs) } - BlackBoxFuncCall::RecursiveAggregation { output_aggregation_object, .. } => { - // Solve the output of the recursive aggregation to zero to prevent missing assignment errors - // The correct value will be computed by the backend - for witness in output_aggregation_object { - insert_value(witness, FieldElement::zero(), initial_witness)?; - } - Ok(()) + BlackBoxFuncCall::EmbeddedCurveAdd { .. } => { + todo!(); + } + BlackBoxFuncCall::EmbeddedCurveDouble { .. } => { + todo!(); } + // Recursive aggregation will be entirely handled by the backend and is not solved by the ACVM + BlackBoxFuncCall::RecursiveAggregation { .. 
} => Ok(()), } } diff --git a/acvm-repo/acvm/src/pwg/directives/mod.rs b/acvm-repo/acvm/src/pwg/directives/mod.rs index cfc458dd611..4605168d98b 100644 --- a/acvm-repo/acvm/src/pwg/directives/mod.rs +++ b/acvm-repo/acvm/src/pwg/directives/mod.rs @@ -1,12 +1,7 @@ use std::cmp::Ordering; -use acir::{ - circuit::directives::{Directive, QuotientDirective}, - native_types::WitnessMap, - FieldElement, -}; +use acir::{circuit::directives::Directive, native_types::WitnessMap, FieldElement}; use num_bigint::BigUint; -use num_traits::Zero; use crate::OpcodeResolutionError; @@ -25,38 +20,6 @@ pub(super) fn solve_directives( directive: &Directive, ) -> Result<(), OpcodeResolutionError> { match directive { - Directive::Quotient(QuotientDirective { a, b, q, r, predicate }) => { - let val_a = get_value(a, initial_witness)?; - let val_b = get_value(b, initial_witness)?; - let int_a = BigUint::from_bytes_be(&val_a.to_be_bytes()); - let int_b = BigUint::from_bytes_be(&val_b.to_be_bytes()); - - // If the predicate is `None`, then we simply return the value 1 - // If the predicate is `Some` but we cannot find a value, then we return unresolved - let pred_value = match predicate { - Some(pred) => get_value(pred, initial_witness)?, - None => FieldElement::one(), - }; - - let (int_r, int_q) = if pred_value.is_zero() || int_b.is_zero() { - (BigUint::zero(), BigUint::zero()) - } else { - (&int_a % &int_b, &int_a / &int_b) - }; - - insert_value( - q, - FieldElement::from_be_bytes_reduce(&int_q.to_bytes_be()), - initial_witness, - )?; - insert_value( - r, - FieldElement::from_be_bytes_reduce(&int_r.to_bytes_be()), - initial_witness, - )?; - - Ok(()) - } Directive::ToLeRadix { a, b, radix } => { let value_a = get_value(a, initial_witness)?; let big_integer = BigUint::from_bytes_be(&value_a.to_be_bytes()); @@ -120,31 +83,3 @@ pub(super) fn solve_directives( } } } - -#[cfg(test)] -mod tests { - use acir::{ - circuit::directives::{Directive, QuotientDirective}, - native_types::{Expression, 
Witness, WitnessMap}, - FieldElement, - }; - - use super::solve_directives; - - #[test] - fn divisor_is_zero() { - let quotient_directive = QuotientDirective { - a: Expression::zero(), - b: Expression::zero(), - q: Witness(0), - r: Witness(0), - predicate: Some(Expression::one()), - }; - - let mut witness_map = WitnessMap::new(); - witness_map.insert(Witness(0), FieldElement::zero()); - - solve_directives(&mut witness_map, &Directive::Quotient(quotient_directive)) - .expect("expected 0/0 to return 0"); - } -} diff --git a/acvm-repo/acvm/src/pwg/memory_op.rs b/acvm-repo/acvm/src/pwg/memory_op.rs index 42951dfa3c1..c1da2cd95cf 100644 --- a/acvm-repo/acvm/src/pwg/memory_op.rs +++ b/acvm-repo/acvm/src/pwg/memory_op.rs @@ -6,7 +6,7 @@ use acir::{ FieldElement, }; -use super::{arithmetic::ArithmeticSolver, get_value, insert_value, witness_to_value}; +use super::{arithmetic::ExpressionSolver, get_value, insert_value, witness_to_value}; use super::{ErrorLocation, OpcodeResolutionError}; type MemoryIndex = u32; @@ -75,7 +75,7 @@ impl MemoryOpSolver { // // In read operations, this corresponds to the witness index at which the value from memory will be written. // In write operations, this corresponds to the expression which will be written to memory. - let value = ArithmeticSolver::evaluate(&op.value, initial_witness); + let value = ExpressionSolver::evaluate(&op.value, initial_witness); // `operation == 0` implies a read operation. (`operation == 1` implies write operation). 
let is_read_operation = operation.is_zero(); diff --git a/acvm-repo/acvm/src/pwg/mod.rs b/acvm-repo/acvm/src/pwg/mod.rs index c1edf60161a..41b96572658 100644 --- a/acvm-repo/acvm/src/pwg/mod.rs +++ b/acvm-repo/acvm/src/pwg/mod.rs @@ -10,8 +10,8 @@ use acir::{ }; use acvm_blackbox_solver::BlackBoxResolutionError; -use self::{arithmetic::ArithmeticSolver, directives::solve_directives, memory_op::MemoryOpSolver}; -use crate::{BlackBoxFunctionSolver, Language}; +use self::{arithmetic::ExpressionSolver, directives::solve_directives, memory_op::MemoryOpSolver}; +use crate::BlackBoxFunctionSolver; use thiserror::Error; @@ -69,8 +69,8 @@ pub enum StepResult<'a, B: BlackBoxFunctionSolver> { // The most common being that one of its input has not been // assigned a value. // -// TODO: ExpressionHasTooManyUnknowns is specific for arithmetic expressions -// TODO: we could have a error enum for arithmetic failure cases in that module +// TODO: ExpressionHasTooManyUnknowns is specific for expression solver +// TODO: we could have a error enum for expression solver failure cases in that module // TODO that can be converted into an OpcodeNotSolvable or OpcodeResolutionError enum #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum OpcodeNotSolvable { @@ -104,8 +104,6 @@ impl std::fmt::Display for ErrorLocation { pub enum OpcodeResolutionError { #[error("Cannot solve opcode: {0}")] OpcodeNotSolvable(#[from] OpcodeNotSolvable), - #[error("Backend does not currently support the {0} opcode. 
ACVM does not currently have a fallback for this opcode.")] - UnsupportedBlackBoxFunc(BlackBoxFunc), #[error("Cannot satisfy constraint")] UnsatisfiedConstrain { opcode_location: ErrorLocation }, #[error("Index out of bounds, array has size {array_size:?}, but index was {index:?}")] @@ -122,9 +120,6 @@ impl From for OpcodeResolutionError { BlackBoxResolutionError::Failed(func, reason) => { OpcodeResolutionError::BlackBoxFunctionFailed(func, reason) } - BlackBoxResolutionError::Unsupported(func) => { - OpcodeResolutionError::UnsupportedBlackBoxFunc(func) - } } } } @@ -258,7 +253,7 @@ impl<'a, B: BlackBoxFunctionSolver> ACVM<'a, B> { let opcode = &self.opcodes[self.instruction_pointer]; let resolution = match opcode { - Opcode::Arithmetic(expr) => ArithmeticSolver::solve(&mut self.witness_map, expr), + Opcode::AssertZero(expr) => ExpressionSolver::solve(&mut self.witness_map, expr), Opcode::BlackBoxFuncCall(bb_func) => { blackbox::solve(self.backend, &mut self.witness_map, bb_func) } @@ -402,7 +397,7 @@ pub fn get_value( expr: &Expression, initial_witness: &WitnessMap, ) -> Result { - let expr = ArithmeticSolver::evaluate(expr, initial_witness); + let expr = ExpressionSolver::evaluate(expr, initial_witness); match expr.to_const() { Some(value) => Ok(value), None => Err(OpcodeResolutionError::OpcodeNotSolvable( @@ -450,30 +445,3 @@ fn any_witness_from_expression(expr: &Expression) -> Option { Some(expr.linear_combinations[0].1) } } - -#[deprecated( - note = "For backwards compatibility, this method allows you to derive _sensible_ defaults for opcode support based on the np language. \n Backends should simply specify what they support." -)] -// This is set to match the previous functionality that we had -// Where we could deduce what opcodes were supported -// by knowing the np complete language -pub fn default_is_opcode_supported(language: Language) -> fn(&Opcode) -> bool { - // R1CS does not support any of the opcode except Arithmetic by default. 
- // The compiler will replace those that it can -- ie range, xor, and - fn r1cs_is_supported(opcode: &Opcode) -> bool { - matches!(opcode, Opcode::Arithmetic(_)) - } - - // PLONK supports most of the opcodes by default - // The ones which are not supported, the acvm compiler will - // attempt to transform into supported opcodes. If these are also not available - // then a compiler error will be emitted. - fn plonk_is_supported(_opcode: &Opcode) -> bool { - true - } - - match language { - Language::R1CS => r1cs_is_supported, - Language::PLONKCSat { .. } => plonk_is_supported, - } -} diff --git a/acvm-repo/acvm/tests/solver.rs b/acvm-repo/acvm/tests/solver.rs index d578555c5dc..486e04d5bf1 100644 --- a/acvm-repo/acvm/tests/solver.rs +++ b/acvm-repo/acvm/tests/solver.rs @@ -11,51 +11,12 @@ use acir::{ FieldElement, }; -use acvm::{ - pwg::{ACVMStatus, ErrorLocation, ForeignCallWaitInfo, OpcodeResolutionError, ACVM}, - BlackBoxFunctionSolver, -}; -use acvm_blackbox_solver::BlackBoxResolutionError; - -pub(crate) struct StubbedBackend; - -impl BlackBoxFunctionSolver for StubbedBackend { - fn schnorr_verify( - &self, - _public_key_x: &FieldElement, - _public_key_y: &FieldElement, - _signature: &[u8], - _message: &[u8], - ) -> Result { - panic!("Path not trodden by this test") - } - fn pedersen_commitment( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - panic!("Path not trodden by this test") - } - fn pedersen_hash( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result { - panic!("Path not trodden by this test") - } - fn fixed_base_scalar_mul( - &self, - _low: &FieldElement, - _high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - panic!("Path not trodden by this test") - } -} +use acvm::pwg::{ACVMStatus, ErrorLocation, ForeignCallWaitInfo, OpcodeResolutionError, ACVM}; +use acvm_blackbox_solver::StubbedBlackBoxSolver; // 
Reenable these test cases once we move the brillig implementation of inversion down into the acvm stdlib. #[test] -#[ignore] fn inversion_brillig_oracle_equivalence() { // Opcodes below describe the following: // fn main(x : Field, y : pub Field) { @@ -111,18 +72,18 @@ fn inversion_brillig_oracle_equivalence() { let opcodes = vec![ Opcode::Brillig(brillig_data), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, }), // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(fe_1, w_z, w_z_inverse)], linear_combinations: vec![], q_c: -fe_1, }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], q_c: fe_0, @@ -135,7 +96,7 @@ fn inversion_brillig_oracle_equivalence() { ]) .into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments); // use the partial witness generation solver with our acir program let solver_status = acvm.solve(); @@ -164,7 +125,6 @@ fn inversion_brillig_oracle_equivalence() { } #[test] -#[ignore] fn double_inversion_brillig_oracle() { // Opcodes below describe the following: // fn main(x : Field, y : pub Field) { @@ -238,18 +198,18 @@ fn double_inversion_brillig_oracle() { let opcodes = vec![ Opcode::Brillig(brillig_data), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, }), // Opcode::Directive(Directive::Invert { x: w_z, result: w_z_inverse }), - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![(fe_1, w_z, w_z_inverse)], linear_combinations: vec![], q_c: -fe_1, }), - 
Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(-fe_1, w_oracle), (fe_1, w_z_inverse)], q_c: fe_0, @@ -264,7 +224,7 @@ fn double_inversion_brillig_oracle() { ]) .into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments); // use the partial witness generation solver with our acir program let solver_status = acvm.solve(); @@ -377,15 +337,15 @@ fn oracle_dependent_execution() { }; let opcodes = vec![ - Opcode::Arithmetic(equality_check), + Opcode::AssertZero(equality_check), Opcode::Brillig(brillig_data), - Opcode::Arithmetic(inverse_equality_check), + Opcode::AssertZero(inverse_equality_check), ]; let witness_assignments = BTreeMap::from([(w_x, FieldElement::from(2u128)), (w_y, FieldElement::from(2u128))]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments); // use the partial witness generation solver with our acir program let solver_status = acvm.solve(); @@ -484,13 +444,14 @@ fn brillig_oracle_predicate() { ]) .into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, witness_assignments); let solver_status = acvm.solve(); assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); // ACVM should be able to be finalized in `Solved` state. 
acvm.finalize(); } + #[test] fn unsatisfied_opcode_resolved() { let a = Witness(0); @@ -516,8 +477,8 @@ fn unsatisfied_opcode_resolved() { values.insert(c, FieldElement::from(1_i128)); values.insert(d, FieldElement::from(2_i128)); - let opcodes = vec![Opcode::Arithmetic(opcode_a)]; - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, values); + let opcodes = vec![Opcode::AssertZero(opcode_a)]; + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, values); let solver_status = acvm.solve(); assert_eq!( solver_status, @@ -595,9 +556,9 @@ fn unsatisfied_opcode_resolved_brillig() { values.insert(w_y, FieldElement::from(1_i128)); values.insert(w_result, FieldElement::from(0_i128)); - let opcodes = vec![brillig_opcode, Opcode::Arithmetic(opcode_a)]; + let opcodes = vec![brillig_opcode, Opcode::AssertZero(opcode_a)]; - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, values); + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, values); let solver_status = acvm.solve(); assert_eq!( solver_status, @@ -630,7 +591,7 @@ fn memory_operations() { predicate: None, }; - let expression = Opcode::Arithmetic(Expression { + let expression = Opcode::AssertZero(Expression { mul_terms: Vec::new(), linear_combinations: vec![ (FieldElement::one(), Witness(7)), @@ -641,7 +602,7 @@ fn memory_operations() { let opcodes = vec![init, read_op, expression]; - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, initial_witness); + let mut acvm = ACVM::new(&StubbedBlackBoxSolver, &opcodes, initial_witness); let solver_status = acvm.solve(); assert_eq!(solver_status, ACVMStatus::Solved); let witness_map = acvm.finalize(); diff --git a/acvm-repo/acvm/tests/stdlib.rs b/acvm-repo/acvm/tests/stdlib.rs deleted file mode 100644 index c96c55f9401..00000000000 --- a/acvm-repo/acvm/tests/stdlib.rs +++ /dev/null @@ -1,354 +0,0 @@ -#![cfg(feature = "testing")] -mod solver; -use crate::solver::StubbedBackend; -use acir::{ - circuit::{ - opcodes::{BlackBoxFuncCall, FunctionInput}, - Circuit, 
Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; -use acvm::{ - compiler::compile, - pwg::{ACVMStatus, ACVM}, - Language, -}; -use acvm_blackbox_solver::{blake2s, hash_to_field_128_security, keccak256, sha256}; -use paste::paste; -use proptest::prelude::*; -use std::collections::{BTreeMap, BTreeSet}; -use stdlib::blackbox_fallbacks::{UInt32, UInt64, UInt8}; - -test_uint!(test_uint8, UInt8, u8, 8); -test_uint!(test_uint32, UInt32, u32, 32); -test_uint!(test_uint64, UInt64, u64, 64); - -#[macro_export] -macro_rules! test_uint { - ( - $name:tt, - $uint:ident, - $u:ident, - $size:expr - ) => { - paste! { - test_uint_inner!( - [<$name _rol>], - [<$name _ror>], - [<$name _euclidean_division>], - [<$name _add>], - [<$name _sub>], - [<$name _left_shift>], - [<$name _right_shift>], - [<$name _less_than>], - $uint, - $u, - $size - ); - } - }; -} - -#[macro_export] -macro_rules! test_uint_inner { - ( - $rol:tt, - $ror:tt, - $euclidean_division:tt, - $add:tt, - $sub:tt, - $left_shift:tt, - $right_shift:tt, - $less_than:tt, - $uint: ident, - $u: ident, - $size: expr - ) => { - proptest! 
{ - #[test] - fn $rol(x in 0..$u::MAX, y in 0..32_u32) { - let fe = FieldElement::from(x as u128); - let w = Witness(1); - let result = x.rotate_left(y); - let uint = $uint::new(w); - let (w, extra_opcodes, _) = uint.rol(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), fe)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $ror(x in 0..$u::MAX, y in 0..32_u32) { - let fe = FieldElement::from(x as u128); - let w = Witness(1); - let result = x.rotate_right(y); - let uint = $uint::new(w); - let (w, extra_opcodes, _) = uint.ror(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), fe)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $euclidean_division(x in 0..$u::MAX, y in 1 - ..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let w1 = Witness(1); - let w2 = Witness(2); - let q = x.div_euclid(y); - let r = x.rem_euclid(y); - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let (q_w, r_w, extra_opcodes, _) = $uint::euclidean_division(&u32_1, &u32_2, 3); - let witness_assignments = BTreeMap::from([(Witness(1), lhs),(Witness(2), rhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&q_w.get_inner()).unwrap(), &FieldElement::from(q as u128)); - 
prop_assert_eq!(acvm.witness_map().get(&r_w.get_inner()).unwrap(), &FieldElement::from(r as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $add(x in 0..$u::MAX, y in 0..$u::MAX, z in 0..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let rhs_z = FieldElement::from(z as u128); - let result = FieldElement::from(((x as u128).wrapping_add(y as u128) % (1_u128 << $size)).wrapping_add(z as u128) % (1_u128 << $size)); - let w1 = Witness(1); - let w2 = Witness(2); - let w3 = Witness(3); - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let u32_3 = $uint::new(w3); - let mut opcodes = Vec::new(); - let (w, extra_opcodes, num_witness) = u32_1.add(&u32_2, 4); - opcodes.extend(extra_opcodes); - let (w2, extra_opcodes, _) = w.add(&u32_3, num_witness); - opcodes.extend(extra_opcodes); - let witness_assignments = BTreeMap::from([(Witness(1), lhs), (Witness(2), rhs), (Witness(3), rhs_z)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w2.get_inner()).unwrap(), &result); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $sub(x in 0..$u::MAX, y in 0..$u::MAX, z in 0..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let rhs_z = FieldElement::from(z as u128); - let result = FieldElement::from(((x as u128).wrapping_sub(y as u128) % (1_u128 << $size)).wrapping_sub(z as u128) % (1_u128 << $size)); - let w1 = Witness(1); - let w2 = Witness(2); - let w3 = Witness(3); - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let u32_3 = $uint::new(w3); - let mut opcodes = Vec::new(); - let (w, extra_opcodes, num_witness) = u32_1.sub(&u32_2, 4); - opcodes.extend(extra_opcodes); - let (w2, extra_opcodes, _) = w.sub(&u32_3, num_witness); - 
opcodes.extend(extra_opcodes); - let witness_assignments = BTreeMap::from([(Witness(1), lhs), (Witness(2), rhs), (Witness(3), rhs_z)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w2.get_inner()).unwrap(), &result); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $left_shift(x in 0..$u::MAX, y in 0..32_u32) { - let lhs = FieldElement::from(x as u128); - let w1 = Witness(1); - let result = x.overflowing_shl(y).0; - let u32_1 = $uint::new(w1); - let (w, extra_opcodes, _) = u32_1.leftshift(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), lhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $right_shift(x in 0..$u::MAX, y in 0..32_u32) { - let lhs = FieldElement::from(x as u128); - let w1 = Witness(1); - let result = x.overflowing_shr(y).0; - let u32_1 = $uint::new(w1); - let (w, extra_opcodes, _) = u32_1.rightshift(y, 2); - let witness_assignments = BTreeMap::from([(Witness(1), lhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - - #[test] - fn $less_than(x in 0..$u::MAX, y in 0..$u::MAX) { - let lhs = FieldElement::from(x as u128); - let rhs = FieldElement::from(y as u128); - let w1 = Witness(1); - let w2 = Witness(2); - let result = x < y; - let u32_1 = $uint::new(w1); - let u32_2 = $uint::new(w2); - let (w, extra_opcodes, _) = 
u32_1.less_than_comparison(&u32_2, 3); - let witness_assignments = BTreeMap::from([(Witness(1), lhs), (Witness(2), rhs)]).into(); - let mut acvm = ACVM::new(&StubbedBackend, &extra_opcodes, witness_assignments); - let solver_status = acvm.solve(); - - prop_assert_eq!(acvm.witness_map().get(&w.get_inner()).unwrap(), &FieldElement::from(result as u128)); - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - } - }; -} - -test_hashes!(test_sha256, sha256, SHA256, does_not_support_sha256); -test_hashes!(test_blake2s, blake2s, Blake2s, does_not_support_blake2s); -test_hashes!(test_keccak, keccak256, Keccak256, does_not_support_keccak); - -fn does_not_support_sha256(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::SHA256 { .. })) -} -fn does_not_support_blake2s(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Blake2s { .. })) -} -fn does_not_support_keccak(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::Keccak256 { .. })) -} - -#[macro_export] -macro_rules! test_hashes { - ( - $name:ident, - $hasher:ident, - $opcode:ident, - $opcode_support: ident - ) => { - proptest! 
{ - #![proptest_config(ProptestConfig::with_cases(3))] - #[test] - fn $name(input_values in proptest::collection::vec(0..u8::MAX, 1..50)) { - let mut opcodes = Vec::new(); - let mut witness_assignments = BTreeMap::new(); - let mut input_witnesses: Vec = Vec::new(); - let mut correct_result_witnesses: Vec = Vec::new(); - let mut output_witnesses: Vec = Vec::new(); - - // prepare test data - let mut counter = 0; - let output = $hasher(&input_values).unwrap(); - for inp_v in input_values { - counter += 1; - let function_input = FunctionInput { witness: Witness(counter), num_bits: 8 }; - input_witnesses.push(function_input); - witness_assignments.insert(Witness(counter), FieldElement::from(inp_v as u128)); - } - - for o_v in output { - counter += 1; - correct_result_witnesses.push(Witness(counter)); - witness_assignments.insert(Witness(counter), FieldElement::from(o_v as u128)); - } - - for _ in 0..32 { - counter += 1; - output_witnesses.push(Witness(counter)); - } - let blackbox = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::$opcode { inputs: input_witnesses, outputs: output_witnesses.clone() }); - opcodes.push(blackbox); - - // constrain the output to be the same as the hasher - for i in 0..correct_result_witnesses.len() { - let mut output_constraint = Expression::from(correct_result_witnesses[i]); - output_constraint.push_addition_term(-FieldElement::one(), output_witnesses[i]); - opcodes.push(Opcode::Arithmetic(output_constraint)); - } - - // compile circuit - let circuit = Circuit { - current_witness_index: witness_assignments.len() as u32 + 32, - opcodes, - private_parameters: BTreeSet::new(), // This is not correct but is unused in this test. 
- ..Circuit::default() - }; - let circuit = compile(circuit, Language::PLONKCSat{ width: 3 }, $opcode_support).unwrap().0; - - // solve witnesses - let mut acvm = ACVM::new(&StubbedBackend, &circuit.opcodes, witness_assignments.into()); - let solver_status = acvm.solve(); - - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } - } - }; -} - -fn does_not_support_hash_to_field(opcode: &Opcode) -> bool { - !matches!(opcode, Opcode::BlackBoxFuncCall(BlackBoxFuncCall::HashToField128Security { .. })) -} - -proptest! { - #![proptest_config(ProptestConfig::with_cases(3))] - #[test] - fn test_hash_to_field(input_values in proptest::collection::vec(0..u8::MAX, 1..50)) { - let mut opcodes = Vec::new(); - let mut witness_assignments = BTreeMap::new(); - let mut input_witnesses: Vec = Vec::new(); - - // prepare test data - let mut counter = 0; - let output = hash_to_field_128_security(&input_values).unwrap(); - for inp_v in input_values { - counter += 1; - let function_input = FunctionInput { witness: Witness(counter), num_bits: 8 }; - input_witnesses.push(function_input); - witness_assignments.insert(Witness(counter), FieldElement::from(inp_v as u128)); - } - - counter += 1; - let correct_result_witnesses: Witness = Witness(counter); - witness_assignments.insert(Witness(counter), output); - - counter += 1; - let output_witness: Witness = Witness(counter); - - let blackbox = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::HashToField128Security { inputs: input_witnesses, output: output_witness }); - opcodes.push(blackbox); - - // constrain the output to be the same as the hasher - let mut output_constraint = Expression::from(correct_result_witnesses); - output_constraint.push_addition_term(-FieldElement::one(), output_witness); - opcodes.push(Opcode::Arithmetic(output_constraint)); - - // compile circuit - let circuit = Circuit { - current_witness_index: witness_assignments.len() as u32 + 1, - opcodes, - private_parameters: BTreeSet::new(), // This is 
not correct but is unused in this test. - ..Circuit::default() - }; - let circuit = compile(circuit, Language::PLONKCSat{ width: 3 }, does_not_support_hash_to_field).unwrap().0; - - // solve witnesses - let mut acvm = ACVM::new(&StubbedBackend, &circuit.opcodes, witness_assignments.into()); - let solver_status = acvm.solve(); - - prop_assert_eq!(solver_status, ACVMStatus::Solved, "should be fully solved"); - } -} diff --git a/acvm-repo/acvm_js/Cargo.toml b/acvm-repo/acvm_js/Cargo.toml index f6054b49de1..226e273c306 100644 --- a/acvm-repo/acvm_js/Cargo.toml +++ b/acvm-repo/acvm_js/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_js" description = "Typescript wrapper around the ACVM allowing execution of ACIR code" # x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -19,17 +19,17 @@ crate-type = ["cdylib"] cfg-if = "1.0.0" [target.'cfg(target_arch = "wasm32")'.dependencies] -acvm = { path = "../acvm", default-features = false } -barretenberg_blackbox_solver = { path = "../barretenberg_blackbox_solver", default-features = false } +acvm.workspace = true +bn254_blackbox_solver = { workspace = true, optional = true } wasm-bindgen.workspace = true wasm-bindgen-futures.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true -js-sys.workspace = true -log.workspace = true +js-sys.workspace = true +tracing-subscriber.workspace = true +tracing-web.workspace = true serde = { version = "1.0.136", features = ["derive"] } -wasm-logger = "0.2.0" const-str = "0.5.5" [build-dependencies] @@ -41,5 +41,5 @@ wasm-bindgen-test.workspace = true [features] default = ["bn254"] -bn254 = ["acvm/bn254", "barretenberg_blackbox_solver/bn254"] -bls12_381 = ["acvm/bls12_381", "barretenberg_blackbox_solver/bls12_381"] +bn254 = ["acvm/bn254", "dep:bn254_blackbox_solver"] +bls12_381 = ["acvm/bls12_381"] diff --git a/acvm-repo/acvm_js/package.json b/acvm-repo/acvm_js/package.json 
index a890ca40080..4ec9b1a2da3 100644 --- a/acvm-repo/acvm_js/package.json +++ b/acvm-repo/acvm_js/package.json @@ -1,16 +1,21 @@ { "name": "@noir-lang/acvm_js", - "version": "0.37.0", - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/acvm.git" - }, + "version": "0.39.0", "publishConfig": { "access": "public" }, - "collaborators": [ + "contributors": [ "The Noir Team " ], + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "acvm_repo/acvm_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "license": "MIT", "main": "./nodejs/acvm_js.js", "types": "./web/acvm_js.d.ts", diff --git a/acvm-repo/acvm_js/src/black_box_solvers.rs b/acvm-repo/acvm_js/src/black_box_solvers.rs index cc3edc3de04..fc0e3b28ebf 100644 --- a/acvm-repo/acvm_js/src/black_box_solvers.rs +++ b/acvm-repo/acvm_js/src/black_box_solvers.rs @@ -40,21 +40,6 @@ pub fn keccak256(inputs: &[u8]) -> Vec { acvm::blackbox_solver::keccak256(inputs).unwrap().into() } -/// Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. -// #[wasm_bindgen] -// pub fn hash_to_field_128_security(inputs: Vec) -> JsString { -// let input_bytes: Vec = inputs -// .into_iter() -// .flat_map(|field_string| { -// let field_element = js_value_to_field_element(field_string.into()).unwrap(); -// witness_assignment.fetch_nearest_bytes(FieldElement::max_num_bits()); -// }) -// .collect(); -// field_element_to_js_string( -// &acvm::blackbox_solver::hash_to_field_128_security(&input_bytes).unwrap(), -// ) -// } - /// Verifies a ECDSA signature over the secp256k1 curve. 
#[wasm_bindgen] pub fn ecdsa_secp256k1_verify( diff --git a/acvm-repo/acvm_js/src/execute.rs b/acvm-repo/acvm_js/src/execute.rs index 81e2a11ed5a..3f691e1abf2 100644 --- a/acvm-repo/acvm_js/src/execute.rs +++ b/acvm-repo/acvm_js/src/execute.rs @@ -2,8 +2,7 @@ use acvm::{ acir::circuit::Circuit, pwg::{ACVMStatus, ErrorLocation, OpcodeResolutionError, ACVM}, }; -#[allow(deprecated)] -use barretenberg_blackbox_solver::BarretenbergSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use js_sys::Error; use wasm_bindgen::prelude::wasm_bindgen; @@ -14,13 +13,11 @@ use crate::{ }; #[wasm_bindgen] -#[allow(deprecated)] -pub struct WasmBlackBoxFunctionSolver(BarretenbergSolver); +pub struct WasmBlackBoxFunctionSolver(Bn254BlackBoxSolver); impl WasmBlackBoxFunctionSolver { async fn initialize() -> WasmBlackBoxFunctionSolver { - #[allow(deprecated)] - WasmBlackBoxFunctionSolver(BarretenbergSolver::initialize().await) + WasmBlackBoxFunctionSolver(Bn254BlackBoxSolver::initialize().await) } } @@ -64,8 +61,8 @@ pub async fn execute_circuit_with_black_box_solver( foreign_call_handler: ForeignCallHandler, ) -> Result { console_error_panic_hook::set_once(); - let circuit: Circuit = - Circuit::deserialize_circuit(&circuit).expect("Failed to deserialize circuit"); + let circuit: Circuit = Circuit::deserialize_circuit(&circuit) + .map_err(|_| JsExecutionError::new("Failed to deserialize circuit. This is likely due to differing serialization formats between ACVM_JS and your compiler".to_string(), None))?; let mut acvm = ACVM::new(&solver.0, &circuit.opcodes, initial_witness.into()); diff --git a/acvm-repo/acvm_js/src/lib.rs b/acvm-repo/acvm_js/src/lib.rs index ba2a37bf984..88afd1767c9 100644 --- a/acvm-repo/acvm_js/src/lib.rs +++ b/acvm-repo/acvm_js/src/lib.rs @@ -24,7 +24,7 @@ cfg_if::cfg_if! 
{ pub use compression::{compress_witness, decompress_witness}; pub use execute::{execute_circuit, execute_circuit_with_black_box_solver, create_black_box_solver}; pub use js_witness_map::JsWitnessMap; - pub use logging::{init_log_level, LogLevel}; + pub use logging::init_log_level; pub use public_witness::{get_public_parameters_witness, get_public_witness, get_return_witness}; pub use js_execution_error::JsExecutionError; } diff --git a/acvm-repo/acvm_js/src/logging.rs b/acvm-repo/acvm_js/src/logging.rs index d939c5f8367..f5d71fae067 100644 --- a/acvm-repo/acvm_js/src/logging.rs +++ b/acvm-repo/acvm_js/src/logging.rs @@ -1,31 +1,26 @@ -use js_sys::JsString; -use log::Level; -use std::str::FromStr; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; +use tracing_web::MakeWebConsoleWriter; use wasm_bindgen::prelude::*; -#[wasm_bindgen(typescript_custom_section)] -const LOG_LEVEL: &'static str = r#" -export type LogLevel = "OFF" | "ERROR" | "WARN" | "INFO" | "DEBUG" | "TRACE"; -"#; - -#[wasm_bindgen] -extern "C" { - #[wasm_bindgen(extends = JsString, typescript_type = "LogLevel")] - pub type LogLevel; -} - /// Sets the package's logging level. /// /// @param {LogLevel} level - The maximum level of logging to be emitted. 
#[wasm_bindgen(js_name = initLogLevel, skip_jsdoc)] -pub fn init_log_level(level: LogLevel) { +pub fn init_log_level(filter: String) { // Set the static variable from Rust use std::sync::Once; - let log_level = level.as_string().unwrap(); - let log_level = Level::from_str(&log_level).unwrap_or(Level::Error); + let filter: EnvFilter = + filter.parse().expect("Could not parse log filter while initializing logger"); + static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { - wasm_logger::init(wasm_logger::Config::new(log_level)); + let fmt_layer = tracing_subscriber::fmt::layer() + .with_ansi(false) + .without_time() + .with_writer(MakeWebConsoleWriter::new()); + + tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); }); } diff --git a/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts b/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts index 9aec1403f6c..3c54fe8e38f 100644 --- a/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts +++ b/acvm-repo/acvm_js/test/browser/black_box_solvers.test.ts @@ -64,18 +64,6 @@ it('successfully calculates the keccak256 hash', async () => { } }); -// it("successfully calculates the hash_to_field_128_security field", async () => { -// const { hash_to_field_128_security_test_cases } = await import( -// "../shared/black_box_solvers" -// ); - -// for (const testCase of hash_to_field_128_security_test_cases) { -// const [preimage, expectedResult] = testCase; -// const hashField = hash_to_field_128_security(preimage); -// expect(hashField).to.be.eq(expectedResult); -// } -// }); - it('successfully verifies secp256k1 ECDSA signatures', async () => { const { ecdsa_secp256k1_test_cases } = await import('../shared/black_box_solvers'); diff --git a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts index 925c1a07eb8..259c51ed1c6 100644 --- a/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts +++ 
b/acvm-repo/acvm_js/test/browser/execute_circuit.test.ts @@ -53,7 +53,7 @@ it('successfully processes simple brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); @@ -79,7 +79,7 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); diff --git a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts index b28b9e72591..adee3c15312 100644 --- a/acvm-repo/acvm_js/test/node/execute_circuit.test.ts +++ b/acvm-repo/acvm_js/test/node/execute_circuit.test.ts @@ -46,7 +46,7 @@ it('successfully processes simple brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. 
expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); @@ -72,7 +72,7 @@ it('successfully processes complex brillig foreign call opcodes', async () => { expect(observedInputs).to.be.deep.eq(oracleCallInputs); // If incorrect value is written into circuit then execution should halt due to unsatisfied constraint in - // arithmetic opcode. Nevertheless, check that returned value was inserted correctly. + // assert-zero opcode. Nevertheless, check that returned value was inserted correctly. expect(solved_witness).to.be.deep.eq(expectedWitnessMap); }); diff --git a/acvm-repo/acvm_js/test/shared/black_box_solvers.ts b/acvm-repo/acvm_js/test/shared/black_box_solvers.ts index a5b6d1ac996..0ab3fc12b72 100644 --- a/acvm-repo/acvm_js/test/shared/black_box_solvers.ts +++ b/acvm-repo/acvm_js/test/shared/black_box_solvers.ts @@ -66,13 +66,6 @@ export const keccak256_test_cases: [Uint8Array, Uint8Array][] = [ ], ]; -// export const hash_to_field_128_security_test_cases: [string[], string][] = [ -// [ -// ["0x0000000000000000000000000000000000000000000000000000000000000001"], -// "0x25cebc29ded2fa515a937e2b5f674e3026c012e5b57f8a48d7dce6b7d274f9d9", -// ], -// ]; - export const ecdsa_secp256k1_test_cases: [[Uint8Array, Uint8Array, Uint8Array, Uint8Array], boolean][] = [ [ [ diff --git a/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts b/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts index 1b6f5e4319a..ff444879eea 100644 --- a/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts +++ b/acvm-repo/acvm_js/test/shared/complex_foreign_call.ts @@ -2,11 +2,11 @@ import { WitnessMap } from '@noir-lang/acvm_js'; // See `complex_brillig_foreign_call` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 83, 219, 10, 128, 48, 8, 117, 174, 139, 159, 179, 254, 160, 127, 137, 222, - 138, 122, 236, 243, 19, 114, 32, 22, 244, 144, 131, 118, 64, 156, 178, 29, 14, 59, 74, 0, 16, 224, 66, 228, 64, 57, 7, - 169, 53, 242, 189, 81, 114, 250, 134, 33, 248, 113, 165, 82, 26, 177, 2, 141, 177, 128, 198, 60, 15, 63, 245, 219, - 211, 23, 215, 255, 139, 15, 251, 211, 112, 180, 28, 157, 212, 189, 100, 82, 179, 64, 170, 63, 109, 235, 190, 204, 135, - 166, 178, 150, 216, 62, 154, 252, 250, 70, 147, 35, 220, 119, 93, 227, 4, 182, 131, 81, 25, 36, 4, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 213, 84, 219, 10, 128, 48, 8, 117, 174, 203, 62, 103, 253, 65, 255, 18, 189, 21, + 245, 216, 231, 55, 200, 193, 193, 122, 137, 28, 180, 3, 226, 20, 39, 135, 29, 103, 32, 34, 71, 23, 124, 50, 150, 179, + 147, 24, 145, 235, 70, 241, 241, 27, 6, 103, 215, 43, 150, 226, 200, 21, 112, 244, 5, 56, 230, 121, 248, 169, 222, + 150, 186, 152, 190, 159, 127, 248, 63, 77, 178, 54, 89, 39, 113, 47, 62, 192, 44, 4, 200, 79, 219, 186, 47, 243, 129, + 173, 180, 36, 152, 211, 49, 43, 255, 234, 62, 22, 48, 221, 119, 0, 226, 4, 104, 45, 56, 241, 60, 4, 0, 0, ]); export const initialWitnessMap: WitnessMap = new Map([ [1, '0x0000000000000000000000000000000000000000000000000000000000000001'], diff --git a/acvm-repo/acvm_js/test/shared/pedersen.ts b/acvm-repo/acvm_js/test/shared/pedersen.ts index 668ee2b510b..e35893fc355 100644 --- a/acvm-repo/acvm_js/test/shared/pedersen.ts +++ b/acvm-repo/acvm_js/test/shared/pedersen.ts @@ -1,7 +1,7 @@ // See `pedersen_circuit` integration test in `acir/tests/test_program_serialization.rs`. 
export const bytecode = Uint8Array.from([ - 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 250, 255, 139, 163, 162, 130, 72, 16, 149, - 241, 3, 135, 84, 164, 172, 173, 213, 175, 251, 45, 198, 96, 243, 211, 50, 152, 67, 220, 211, 92, 0, 0, 0, + 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 93, 138, 9, 10, 0, 64, 8, 2, 103, 15, 232, 255, 31, 142, 138, 10, 34, 65, 84, 198, + 15, 28, 82, 145, 178, 182, 86, 191, 238, 183, 24, 131, 205, 79, 203, 0, 166, 242, 158, 93, 92, 0, 0, 0, ]); export const initialWitnessMap = new Map([[1, '0x0000000000000000000000000000000000000000000000000000000000000001']]); diff --git a/acvm-repo/acvm_js/test/shared/schnorr_verify.ts b/acvm-repo/acvm_js/test/shared/schnorr_verify.ts index f88a70ba4a1..5716cbd30f8 100644 --- a/acvm-repo/acvm_js/test/shared/schnorr_verify.ts +++ b/acvm-repo/acvm_js/test/shared/schnorr_verify.ts @@ -2,7 +2,7 @@ export const bytecode = Uint8Array.from([ 31, 139, 8, 0, 0, 0, 0, 0, 0, 255, 77, 210, 87, 78, 2, 1, 20, 134, 209, 177, 247, 222, 123, 71, 68, 68, 68, 68, 68, 68, 68, 68, 68, 221, 133, 251, 95, 130, 145, 27, 206, 36, 78, 50, 57, 16, 94, 200, 253, 191, 159, 36, 73, 134, 146, - 193, 19, 142, 241, 183, 255, 14, 179, 233, 247, 145, 254, 59, 217, 127, 71, 57, 198, 113, 78, 48, 125, 167, 56, 205, + 193, 19, 142, 243, 183, 255, 14, 179, 233, 247, 145, 254, 59, 217, 127, 71, 57, 198, 113, 78, 48, 125, 167, 56, 205, 25, 206, 114, 142, 243, 92, 224, 34, 151, 184, 204, 21, 174, 114, 141, 235, 220, 224, 38, 183, 184, 205, 29, 238, 114, 143, 251, 60, 224, 33, 143, 120, 204, 19, 158, 242, 140, 25, 158, 51, 203, 11, 230, 120, 201, 60, 175, 88, 224, 53, 139, 188, 97, 137, 183, 44, 243, 142, 21, 222, 179, 202, 7, 214, 248, 200, 58, 159, 216, 224, 51, 155, 124, 97, 235, @@ -11,7 +11,7 @@ export const bytecode = Uint8Array.from([ 162, 149, 232, 36, 26, 137, 62, 162, 141, 232, 34, 154, 136, 30, 162, 133, 232, 32, 26, 136, 253, 99, 251, 195, 100, 176, 121, 236, 29, 91, 159, 218, 56, 99, 219, 172, 77, 115, 182, 
204, 219, 176, 96, 187, 162, 205, 74, 182, 42, 219, 168, 98, 155, 170, 77, 106, 182, 168, 219, 160, 225, 246, 77, 55, 111, 185, 113, 219, 109, 59, 110, 218, 117, 203, - 158, 27, 166, 55, 75, 239, 150, 184, 101, 250, 252, 1, 19, 89, 159, 101, 220, 3, 0, 0, + 158, 27, 166, 55, 75, 239, 150, 184, 101, 250, 252, 1, 55, 204, 92, 74, 220, 3, 0, 0, ]); export const initialWitnessMap = new Map([ diff --git a/acvm-repo/blackbox_solver/Cargo.toml b/acvm-repo/blackbox_solver/Cargo.toml index aaff52c3c04..7359cf307e4 100644 --- a/acvm-repo/blackbox_solver/Cargo.toml +++ b/acvm-repo/blackbox_solver/Cargo.toml @@ -2,7 +2,7 @@ name = "acvm_blackbox_solver" description = "A solver for the blackbox functions found in ACIR and Brillig" # x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -17,8 +17,10 @@ acir.workspace = true thiserror.workspace = true blake2 = "0.10.6" +blake3 = "1.5.0" sha2 = "0.10.6" sha3 = "0.10.6" +keccak = "0.1.4" k256 = { version = "0.11.0", features = [ "ecdsa", "ecdsa-core", diff --git a/acvm-repo/blackbox_solver/src/curve_specific_solver.rs b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs new file mode 100644 index 00000000000..82941e91d61 --- /dev/null +++ b/acvm-repo/blackbox_solver/src/curve_specific_solver.rs @@ -0,0 +1,104 @@ +use acir::{BlackBoxFunc, FieldElement}; + +use crate::BlackBoxResolutionError; + +/// This component will generate outputs for Blackbox function calls where the underlying [`acir::BlackBoxFunc`] +/// doesn't have a canonical Rust implementation. +/// +/// Returns an [`BlackBoxResolutionError`] if the backend does not support the given [`acir::BlackBoxFunc`]. 
+pub trait BlackBoxFunctionSolver { + fn schnorr_verify( + &self, + public_key_x: &FieldElement, + public_key_y: &FieldElement, + signature: &[u8], + message: &[u8], + ) -> Result; + fn pedersen_commitment( + &self, + inputs: &[FieldElement], + domain_separator: u32, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + fn pedersen_hash( + &self, + inputs: &[FieldElement], + domain_separator: u32, + ) -> Result; + fn fixed_base_scalar_mul( + &self, + low: &FieldElement, + high: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + fn ec_add( + &self, + input1_x: &FieldElement, + input1_y: &FieldElement, + input2_x: &FieldElement, + input2_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; + fn ec_double( + &self, + input_x: &FieldElement, + input_x: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; +} + +pub struct StubbedBlackBoxSolver; + +impl StubbedBlackBoxSolver { + fn fail(black_box_function: BlackBoxFunc) -> BlackBoxResolutionError { + BlackBoxResolutionError::Failed( + black_box_function, + format!("{} is not supported", black_box_function.name()), + ) + } +} + +impl BlackBoxFunctionSolver for StubbedBlackBoxSolver { + fn schnorr_verify( + &self, + _public_key_x: &FieldElement, + _public_key_y: &FieldElement, + _signature: &[u8], + _message: &[u8], + ) -> Result { + Err(Self::fail(BlackBoxFunc::SchnorrVerify)) + } + fn pedersen_commitment( + &self, + _inputs: &[FieldElement], + _domain_separator: u32, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + Err(Self::fail(BlackBoxFunc::PedersenCommitment)) + } + fn pedersen_hash( + &self, + _inputs: &[FieldElement], + _domain_separator: u32, + ) -> Result { + Err(Self::fail(BlackBoxFunc::PedersenHash)) + } + fn fixed_base_scalar_mul( + &self, + _low: &FieldElement, + _high: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + 
Err(Self::fail(BlackBoxFunc::FixedBaseScalarMul)) + } + fn ec_add( + &self, + _input1_x: &FieldElement, + _input1_y: &FieldElement, + _input2_x: &FieldElement, + _input2_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + Err(Self::fail(BlackBoxFunc::EmbeddedCurveAdd)) + } + fn ec_double( + &self, + _input_x: &FieldElement, + _input_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + Err(Self::fail(BlackBoxFunc::EmbeddedCurveDouble)) + } +} diff --git a/acvm-repo/blackbox_solver/src/lib.rs b/acvm-repo/blackbox_solver/src/lib.rs index 13d0f562415..afba4eff17c 100644 --- a/acvm-repo/blackbox_solver/src/lib.rs +++ b/acvm-repo/blackbox_solver/src/lib.rs @@ -7,50 +7,23 @@ //! For functions that are backend-dependent, it provides a Trait [BlackBoxFunctionSolver] that must be implemented by the backend. //! For functions that have a reference implementation, such as [keccak256], this crate exports the reference implementation directly. -use acir::{BlackBoxFunc, FieldElement}; +use acir::BlackBoxFunc; use blake2::digest::generic_array::GenericArray; use blake2::{Blake2s256, Digest}; use sha2::Sha256; use sha3::Keccak256; use thiserror::Error; +mod curve_specific_solver; + +pub use curve_specific_solver::{BlackBoxFunctionSolver, StubbedBlackBoxSolver}; + #[derive(Clone, PartialEq, Eq, Debug, Error)] pub enum BlackBoxResolutionError { - #[error("unsupported blackbox function: {0}")] - Unsupported(BlackBoxFunc), #[error("failed to solve blackbox function: {0}, reason: {1}")] Failed(BlackBoxFunc, String), } -/// This component will generate outputs for Blackbox function calls where the underlying [`acir::BlackBoxFunc`] -/// doesn't have a canonical Rust implementation. -/// -/// Returns an [`BlackBoxResolutionError`] if the backend does not support the given [`acir::BlackBoxFunc`]. 
-pub trait BlackBoxFunctionSolver { - fn schnorr_verify( - &self, - public_key_x: &FieldElement, - public_key_y: &FieldElement, - signature: &[u8], - message: &[u8], - ) -> Result; - fn pedersen_commitment( - &self, - inputs: &[FieldElement], - domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; - fn pedersen_hash( - &self, - inputs: &[FieldElement], - domain_separator: u32, - ) -> Result; - fn fixed_base_scalar_mul( - &self, - low: &FieldElement, - high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError>; -} - pub fn sha256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { generic_hash_256::(inputs) .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::SHA256, err)) @@ -61,14 +34,22 @@ pub fn blake2s(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Blake2s, err)) } +pub fn blake3(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { + Ok(blake3::hash(inputs).into()) +} + pub fn keccak256(inputs: &[u8]) -> Result<[u8; 32], BlackBoxResolutionError> { generic_hash_256::(inputs) .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::Keccak256, err)) } -pub fn hash_to_field_128_security(inputs: &[u8]) -> Result { - generic_hash_to_field::(inputs) - .map_err(|err| BlackBoxResolutionError::Failed(BlackBoxFunc::HashToField128Security, err)) +const KECCAK_LANES: usize = 25; + +pub fn keccakf1600( + mut state: [u64; KECCAK_LANES], +) -> Result<[u64; KECCAK_LANES], BlackBoxResolutionError> { + keccak::f1600(&mut state); + Ok(state) } pub fn ecdsa_secp256k1_verify( @@ -97,14 +78,6 @@ fn generic_hash_256(message: &[u8]) -> Result<[u8; 32], String> { Ok(output_bytes) } -/// Does a generic hash of the entire inputs converting the resulting hash into a single output field. 
-fn generic_hash_to_field(message: &[u8]) -> Result { - let output_bytes: [u8; 32] = - D::digest(message).as_slice().try_into().map_err(|_| "digest should be 256 bits")?; - - Ok(FieldElement::from_be_bytes_reduce(&output_bytes)) -} - fn verify_secp256k1_ecdsa_signature( hashed_msg: &[u8], public_key_x_bytes: &[u8; 32], @@ -240,6 +213,79 @@ fn verify_secp256r1_ecdsa_signature( } } +#[cfg(test)] +mod keccakf1600_tests { + use crate::keccakf1600; + + #[test] + fn sanity_check() { + // Test vectors are copied from XKCP (eXtended Keccak Code Package) + // https://github.com/XKCP/XKCP/blob/master/tests/TestVectors/KeccakF-1600-IntermediateValues.txt + let zero_state = [0u64; 25]; + + let expected_state_first = [ + 0xF1258F7940E1DDE7, + 0x84D5CCF933C0478A, + 0xD598261EA65AA9EE, + 0xBD1547306F80494D, + 0x8B284E056253D057, + 0xFF97A42D7F8E6FD4, + 0x90FEE5A0A44647C4, + 0x8C5BDA0CD6192E76, + 0xAD30A6F71B19059C, + 0x30935AB7D08FFC64, + 0xEB5AA93F2317D635, + 0xA9A6E6260D712103, + 0x81A57C16DBCF555F, + 0x43B831CD0347C826, + 0x01F22F1A11A5569F, + 0x05E5635A21D9AE61, + 0x64BEFEF28CC970F2, + 0x613670957BC46611, + 0xB87C5A554FD00ECB, + 0x8C3EE88A1CCF32C8, + 0x940C7922AE3A2614, + 0x1841F924A2C509E4, + 0x16F53526E70465C2, + 0x75F644E97F30A13B, + 0xEAF1FF7B5CECA249, + ]; + let expected_state_second = [ + 0x2D5C954DF96ECB3C, + 0x6A332CD07057B56D, + 0x093D8D1270D76B6C, + 0x8A20D9B25569D094, + 0x4F9C4F99E5E7F156, + 0xF957B9A2DA65FB38, + 0x85773DAE1275AF0D, + 0xFAF4F247C3D810F7, + 0x1F1B9EE6F79A8759, + 0xE4FECC0FEE98B425, + 0x68CE61B6B9CE68A1, + 0xDEEA66C4BA8F974F, + 0x33C43D836EAFB1F5, + 0xE00654042719DBD9, + 0x7CF8A9F009831265, + 0xFD5449A6BF174743, + 0x97DDAD33D8994B40, + 0x48EAD5FC5D0BE774, + 0xE3B8C8EE55B7B03C, + 0x91A0226E649E42E9, + 0x900E3129E7BADD7B, + 0x202A9EC5FAA3CCE8, + 0x5B3402464E1C3DB6, + 0x609F4E62A44C1059, + 0x20D06CD26A8FBF5C, + ]; + + let state_first = keccakf1600(zero_state).unwrap(); + let state_second = keccakf1600(state_first).unwrap(); + + assert_eq!(state_first, 
expected_state_first); + assert_eq!(state_second, expected_state_second); + } +} + #[cfg(test)] mod secp256k1_tests { use super::verify_secp256k1_ecdsa_signature; diff --git a/acvm-repo/barretenberg_blackbox_solver/CHANGELOG.md b/acvm-repo/bn254_blackbox_solver/CHANGELOG.md similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/CHANGELOG.md rename to acvm-repo/bn254_blackbox_solver/CHANGELOG.md diff --git a/acvm-repo/barretenberg_blackbox_solver/Cargo.toml b/acvm-repo/bn254_blackbox_solver/Cargo.toml similarity index 86% rename from acvm-repo/barretenberg_blackbox_solver/Cargo.toml rename to acvm-repo/bn254_blackbox_solver/Cargo.toml index bcf02eeab09..a73aded231f 100644 --- a/acvm-repo/barretenberg_blackbox_solver/Cargo.toml +++ b/acvm-repo/bn254_blackbox_solver/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "barretenberg_blackbox_solver" -description = "A wrapper around a barretenberg WASM binary to execute black box functions for which there is no rust implementation" +name = "bn254_blackbox_solver" +description = "Solvers for black box functions which are specific for the bn254 curve" # x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true @@ -54,4 +54,3 @@ reqwest = { version = "0.11.20", default-features = false, features = [ [features] default = ["bn254"] bn254 = ["acir/bn254"] -bls12_381 = ["acir/bls12_381"] diff --git a/acvm-repo/barretenberg_blackbox_solver/build.rs b/acvm-repo/bn254_blackbox_solver/build.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/build.rs rename to acvm-repo/bn254_blackbox_solver/build.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/acvm_backend.wasm b/acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/acvm_backend.wasm rename to acvm-repo/bn254_blackbox_solver/src/acvm_backend.wasm diff --git 
a/acvm-repo/barretenberg_blackbox_solver/src/fixed_base_scalar_mul.rs b/acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/fixed_base_scalar_mul.rs rename to acvm-repo/bn254_blackbox_solver/src/fixed_base_scalar_mul.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/lib.rs b/acvm-repo/bn254_blackbox_solver/src/lib.rs similarity index 75% rename from acvm-repo/barretenberg_blackbox_solver/src/lib.rs rename to acvm-repo/bn254_blackbox_solver/src/lib.rs index 5d2ab834536..92c45e93dea 100644 --- a/acvm-repo/barretenberg_blackbox_solver/src/lib.rs +++ b/acvm-repo/bn254_blackbox_solver/src/lib.rs @@ -13,36 +13,32 @@ use wasm::Barretenberg; use self::wasm::{Pedersen, SchnorrSig}; -#[deprecated = "The `BarretenbergSolver` is a temporary solution and will be removed in future."] -pub struct BarretenbergSolver { +pub struct Bn254BlackBoxSolver { blackbox_vendor: Barretenberg, } -#[allow(deprecated)] -impl BarretenbergSolver { +impl Bn254BlackBoxSolver { #[cfg(target_arch = "wasm32")] - pub async fn initialize() -> BarretenbergSolver { + pub async fn initialize() -> Bn254BlackBoxSolver { let blackbox_vendor = Barretenberg::initialize().await; - BarretenbergSolver { blackbox_vendor } + Bn254BlackBoxSolver { blackbox_vendor } } #[cfg(not(target_arch = "wasm32"))] - pub fn new() -> BarretenbergSolver { + pub fn new() -> Bn254BlackBoxSolver { let blackbox_vendor = Barretenberg::new(); - BarretenbergSolver { blackbox_vendor } + Bn254BlackBoxSolver { blackbox_vendor } } } #[cfg(not(target_arch = "wasm32"))] -#[allow(deprecated)] -impl Default for BarretenbergSolver { +impl Default for Bn254BlackBoxSolver { fn default() -> Self { Self::new() } } -#[allow(deprecated)] -impl BlackBoxFunctionSolver for BarretenbergSolver { +impl BlackBoxFunctionSolver for Bn254BlackBoxSolver { fn schnorr_verify( &self, public_key_x: &FieldElement, @@ -57,7 +53,6 @@ impl BlackBoxFunctionSolver for 
BarretenbergSolver { let sig_s: [u8; 32] = signature[0..32].try_into().unwrap(); let sig_e: [u8; 32] = signature[32..64].try_into().unwrap(); - #[allow(deprecated)] self.blackbox_vendor.verify_signature(pub_key, sig_s, sig_e, message).map_err(|err| { BlackBoxResolutionError::Failed(BlackBoxFunc::SchnorrVerify, err.to_string()) }) @@ -92,4 +87,22 @@ impl BlackBoxFunctionSolver for BarretenbergSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { fixed_base_scalar_mul(low, high) } + + fn ec_add( + &self, + _input1_x: &FieldElement, + _input1_y: &FieldElement, + _input2_x: &FieldElement, + _input2_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + todo!(); + } + + fn ec_double( + &self, + _input_x: &FieldElement, + _input_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + todo!(); + } } diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/barretenberg_structures.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/barretenberg_structures.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/barretenberg_structures.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/barretenberg_structures.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/mod.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/mod.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/mod.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/pedersen.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/pedersen.rs similarity index 100% rename from acvm-repo/barretenberg_blackbox_solver/src/wasm/pedersen.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/pedersen.rs diff --git a/acvm-repo/barretenberg_blackbox_solver/src/wasm/schnorr.rs b/acvm-repo/bn254_blackbox_solver/src/wasm/schnorr.rs similarity index 100% rename from 
acvm-repo/barretenberg_blackbox_solver/src/wasm/schnorr.rs rename to acvm-repo/bn254_blackbox_solver/src/wasm/schnorr.rs diff --git a/acvm-repo/brillig/Cargo.toml b/acvm-repo/brillig/Cargo.toml index 47f9b3f429e..b9cedfe8d60 100644 --- a/acvm-repo/brillig/Cargo.toml +++ b/acvm-repo/brillig/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig" description = "Brillig is the bytecode ACIR uses for non-determinism." # x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig/src/black_box.rs b/acvm-repo/brillig/src/black_box.rs index 16f91c51baa..e4b0834de2c 100644 --- a/acvm-repo/brillig/src/black_box.rs +++ b/acvm-repo/brillig/src/black_box.rs @@ -9,14 +9,12 @@ pub enum BlackBoxOp { Sha256 { message: HeapVector, output: HeapArray }, /// Calculates the Blake2s hash of the inputs. Blake2s { message: HeapVector, output: HeapArray }, + /// Calculates the Blake3 hash of the inputs. + Blake3 { message: HeapVector, output: HeapArray }, /// Calculates the Keccak256 hash of the inputs. Keccak256 { message: HeapVector, output: HeapArray }, - /// Hashes a set of inputs and applies the field modulus to the result - /// to return a value which can be represented as a [`FieldElement`][acir_field::FieldElement] - /// - /// This is implemented using the `Blake2s` hash function. - /// The "128" in the name specifies that this function should have 128 bits of security. - HashToField128Security { message: HeapVector, output: RegisterIndex }, + /// Keccak Permutation function of 1600 width + Keccakf1600 { message: HeapVector, output: HeapArray }, /// Verifies a ECDSA signature over the secp256k1 curve. EcdsaSecp256k1 { hashed_msg: HeapVector, @@ -47,4 +45,14 @@ pub enum BlackBoxOp { PedersenHash { inputs: HeapVector, domain_separator: RegisterIndex, output: RegisterIndex }, /// Performs scalar multiplication over the embedded curve. 
FixedBaseScalarMul { low: RegisterIndex, high: RegisterIndex, result: HeapArray }, + /// Performs addition over the embedded curve. + EmbeddedCurveAdd { + input1_x: RegisterIndex, + input1_y: RegisterIndex, + input2_x: RegisterIndex, + input2_y: RegisterIndex, + result: HeapArray, + }, + /// Performs point doubling over the embedded curve. + EmbeddedCurveDouble { input1_x: RegisterIndex, input1_y: RegisterIndex, result: HeapArray }, } diff --git a/acvm-repo/brillig/src/opcodes.rs b/acvm-repo/brillig/src/opcodes.rs index 09a6eeab236..6b126691166 100644 --- a/acvm-repo/brillig/src/opcodes.rs +++ b/acvm-repo/brillig/src/opcodes.rs @@ -156,28 +156,6 @@ pub enum BrilligOpcode { Stop, } -impl BrilligOpcode { - pub fn name(&self) -> &'static str { - match self { - BrilligOpcode::BinaryFieldOp { .. } => "binary_field_op", - BrilligOpcode::BinaryIntOp { .. } => "binary_int_op", - BrilligOpcode::JumpIfNot { .. } => "jmp_if_not", - BrilligOpcode::JumpIf { .. } => "jmp_if", - BrilligOpcode::Jump { .. } => "jmp", - BrilligOpcode::Call { .. } => "call", - BrilligOpcode::Const { .. } => "const", - BrilligOpcode::Return => "return", - BrilligOpcode::ForeignCall { .. } => "foreign_call", - BrilligOpcode::Mov { .. } => "mov", - BrilligOpcode::Load { .. } => "load", - BrilligOpcode::Store { .. 
} => "store", - BrilligOpcode::BlackBox(_) => "black_box", - BrilligOpcode::Trap => "trap", - BrilligOpcode::Stop => "stop", - } - } -} - /// Binary fixed-length field expressions #[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] pub enum BinaryFieldOp { diff --git a/acvm-repo/brillig_vm/Cargo.toml b/acvm-repo/brillig_vm/Cargo.toml index 8c533352dd5..5a8a34be881 100644 --- a/acvm-repo/brillig_vm/Cargo.toml +++ b/acvm-repo/brillig_vm/Cargo.toml @@ -2,7 +2,7 @@ name = "brillig_vm" description = "The virtual machine that processes Brillig bytecode, used to introduce non-determinism to the ACVM" # x-release-please-start-version -version = "0.37.0" +version = "0.39.0" # x-release-please-end authors.workspace = true edition.workspace = true diff --git a/acvm-repo/brillig_vm/src/black_box.rs b/acvm-repo/brillig_vm/src/black_box.rs index f254c758c7c..8e8512ec535 100644 --- a/acvm-repo/brillig_vm/src/black_box.rs +++ b/acvm-repo/brillig_vm/src/black_box.rs @@ -1,7 +1,7 @@ use acir::brillig::{BlackBoxOp, HeapArray, HeapVector, Value}; use acir::{BlackBoxFunc, FieldElement}; use acvm_blackbox_solver::{ - blake2s, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, hash_to_field_128_security, keccak256, + blake2s, blake3, ecdsa_secp256k1_verify, ecdsa_secp256r1_verify, keccak256, keccakf1600, sha256, BlackBoxFunctionSolver, BlackBoxResolutionError, }; @@ -58,17 +58,30 @@ pub(crate) fn evaluate_black_box( memory.write_slice(registers.get(output.pointer).to_usize(), &to_value_vec(&bytes)); Ok(()) } + BlackBoxOp::Blake3 { message, output } => { + let message = to_u8_vec(read_heap_vector(memory, registers, message)); + let bytes = blake3(message.as_slice())?; + memory.write_slice(registers.get(output.pointer).to_usize(), &to_value_vec(&bytes)); + Ok(()) + } BlackBoxOp::Keccak256 { message, output } => { let message = to_u8_vec(read_heap_vector(memory, registers, message)); let bytes = keccak256(message.as_slice())?; 
memory.write_slice(registers.get(output.pointer).to_usize(), &to_value_vec(&bytes)); Ok(()) } - BlackBoxOp::HashToField128Security { message, output } => { - let field = hash_to_field_128_security(&to_u8_vec(read_heap_vector( - memory, registers, message, - )))?; - registers.set(*output, field.into()); + BlackBoxOp::Keccakf1600 { message, output } => { + let state_vec: Vec = read_heap_vector(memory, registers, message) + .iter() + .map(|value| value.to_field().try_to_u64().unwrap()) + .collect(); + let state: [u64; 25] = state_vec.try_into().unwrap(); + + let new_state = keccakf1600(state)?; + + let new_state: Vec = + new_state.into_iter().map(|x| Value::from(x as usize)).collect(); + memory.write_slice(registers.get(output.pointer).to_usize(), &new_state); Ok(()) } BlackBoxOp::EcdsaSecp256k1 { @@ -85,11 +98,7 @@ pub(crate) fn evaluate_black_box( signature, result: result_register, } => { - let bb_func = match op { - BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, - BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, - _ => unreachable!(), - }; + let bb_func = black_box_function_from_op(op); let public_key_x: [u8; 32] = to_u8_vec(read_heap_array( memory, @@ -124,7 +133,7 @@ pub(crate) fn evaluate_black_box( BlackBoxOp::EcdsaSecp256r1 { .. } => { ecdsa_secp256r1_verify(&hashed_msg, &public_key_x, &public_key_y, &signature)? 
} - _ => unreachable!(), + _ => unreachable!("`BlackBoxOp` is guarded against being a non-ecdsa operation"), }; registers.set(*result_register, result.into()); @@ -147,6 +156,22 @@ pub(crate) fn evaluate_black_box( memory.write_slice(registers.get(result.pointer).to_usize(), &[x.into(), y.into()]); Ok(()) } + BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { + let input1_x = registers.get(*input1_x).to_field(); + let input1_y = registers.get(*input1_y).to_field(); + let input2_x = registers.get(*input2_x).to_field(); + let input2_y = registers.get(*input2_y).to_field(); + let (x, y) = solver.ec_add(&input1_x, &input1_y, &input2_x, &input2_y)?; + memory.write_slice(registers.get(result.pointer).to_usize(), &[x.into(), y.into()]); + Ok(()) + } + BlackBoxOp::EmbeddedCurveDouble { input1_x, input1_y, result } => { + let input1_x = registers.get(*input1_x).to_field(); + let input1_y = registers.get(*input1_y).to_field(); + let (x, y) = solver.ec_double(&input1_x, &input1_y)?; + memory.write_slice(registers.get(result.pointer).to_usize(), &[x.into(), y.into()]); + Ok(()) + } BlackBoxOp::PedersenCommitment { inputs, domain_separator, output } => { let inputs: Vec = read_heap_vector(memory, registers, inputs).iter().map(|x| x.to_field()).collect(); @@ -178,6 +203,24 @@ pub(crate) fn evaluate_black_box( } } +fn black_box_function_from_op(op: &BlackBoxOp) -> BlackBoxFunc { + match op { + BlackBoxOp::Sha256 { .. } => BlackBoxFunc::SHA256, + BlackBoxOp::Blake2s { .. } => BlackBoxFunc::Blake2s, + BlackBoxOp::Blake3 { .. } => BlackBoxFunc::Blake3, + BlackBoxOp::Keccak256 { .. } => BlackBoxFunc::Keccak256, + BlackBoxOp::Keccakf1600 { .. } => BlackBoxFunc::Keccakf1600, + BlackBoxOp::EcdsaSecp256k1 { .. } => BlackBoxFunc::EcdsaSecp256k1, + BlackBoxOp::EcdsaSecp256r1 { .. } => BlackBoxFunc::EcdsaSecp256r1, + BlackBoxOp::SchnorrVerify { .. } => BlackBoxFunc::SchnorrVerify, + BlackBoxOp::PedersenCommitment { .. 
} => BlackBoxFunc::PedersenCommitment, + BlackBoxOp::PedersenHash { .. } => BlackBoxFunc::PedersenHash, + BlackBoxOp::FixedBaseScalarMul { .. } => BlackBoxFunc::FixedBaseScalarMul, + BlackBoxOp::EmbeddedCurveAdd { .. } => BlackBoxFunc::EmbeddedCurveAdd, + BlackBoxOp::EmbeddedCurveDouble { .. } => BlackBoxFunc::EmbeddedCurveDouble, + } +} + #[cfg(test)] mod test { use acir::brillig::{BlackBoxOp, HeapValueType}; diff --git a/acvm-repo/brillig_vm/src/lib.rs b/acvm-repo/brillig_vm/src/lib.rs index f44c8693ead..3d9ed8330e5 100644 --- a/acvm-repo/brillig_vm/src/lib.rs +++ b/acvm-repo/brillig_vm/src/lib.rs @@ -523,6 +523,22 @@ impl BlackBoxFunctionSolver for DummyBlackBoxSolver { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Ok((4_u128.into(), 5_u128.into())) } + fn ec_add( + &self, + _input1_x: &FieldElement, + _input1_y: &FieldElement, + _input2_x: &FieldElement, + _input2_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + Ok((5_u128.into(), 6_u128.into())) + } + fn ec_double( + &self, + _input1_x: &FieldElement, + _input1_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + Ok((7_u128.into(), 8_u128.into())) + } } #[cfg(test)] diff --git a/acvm-repo/stdlib/CHANGELOG.md b/acvm-repo/stdlib/CHANGELOG.md deleted file mode 100644 index bea80c95d1e..00000000000 --- a/acvm-repo/stdlib/CHANGELOG.md +++ /dev/null @@ -1,350 +0,0 @@ -# Changelog - -## [0.27.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.26.1...acvm_stdlib-v0.27.0) (2023-09-19) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.26.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.26.0...acvm_stdlib-v0.26.1) (2023-09-12) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.26.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.25.0...acvm_stdlib-v0.26.0) (2023-09-07) - - -### Miscellaneous Chores - -* 
**acvm_stdlib:** Synchronize acvm versions - -## [0.25.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.24.1...acvm_stdlib-v0.25.0) (2023-09-04) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.24.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.24.0...acvm_stdlib-v0.24.1) (2023-09-03) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.24.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.23.0...acvm_stdlib-v0.24.0) (2023-08-31) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.23.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.22.0...acvm_stdlib-v0.23.0) (2023-08-30) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.22.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.21.0...acvm_stdlib-v0.22.0) (2023-08-18) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.21.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.20.1...acvm_stdlib-v0.21.0) (2023-07-26) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.20.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.20.0...acvm_stdlib-v0.20.1) (2023-07-26) - - -### Features - -* add optimisations to fallback black box functions on booleans ([#446](https://github.com/noir-lang/acvm/issues/446)) ([2cfb2a8](https://github.com/noir-lang/acvm/commit/2cfb2a8cf911a81eedbd9da13ab2c616abd67f83)) -* **stdlib:** Add fallback implementation of `Keccak256` black box function ([#445](https://github.com/noir-lang/acvm/issues/445)) ([f7ebb03](https://github.com/noir-lang/acvm/commit/f7ebb03653c971f119700ff8126d9eb5ff01be0f)) - -## [0.20.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.19.1...acvm_stdlib-v0.20.0) (2023-07-20) - - -### Features - -* **stdlib:** Add fallback implementation of `HashToField128Security` black box function 
([#435](https://github.com/noir-lang/acvm/issues/435)) ([ed40f22](https://github.com/noir-lang/acvm/commit/ed40f228529e888d1960bfa70cb92b277e24b37f)) - -## [0.19.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.19.0...acvm_stdlib-v0.19.1) (2023-07-17) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.19.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.18.2...acvm_stdlib-v0.19.0) (2023-07-15) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.18.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.18.1...acvm_stdlib-v0.18.2) (2023-07-12) - - -### Features - -* **stdlib:** Add fallback implementation of `Blake2s` black box function ([#424](https://github.com/noir-lang/acvm/issues/424)) ([982d940](https://github.com/noir-lang/acvm/commit/982d94087d46092ce7a5e94dbd7e732195f58e42)) - -## [0.18.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.18.0...acvm_stdlib-v0.18.1) (2023-07-12) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.18.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.17.0...acvm_stdlib-v0.18.0) (2023-07-12) - - -### Features - -* **stdlib:** Add fallback implementation of `SHA256` black box function ([#407](https://github.com/noir-lang/acvm/issues/407)) ([040369a](https://github.com/noir-lang/acvm/commit/040369adc8749fa5ec2edd255ff54c105c3140f5)) - -## [0.17.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.16.0...acvm_stdlib-v0.17.0) (2023-07-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.16.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.15.1...acvm_stdlib-v0.16.0) (2023-07-06) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.15.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.15.0...acvm_stdlib-v0.15.1) (2023-06-20) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize 
acvm versions - -## [0.15.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.14.2...acvm_stdlib-v0.15.0) (2023-06-15) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.14.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.14.1...acvm_stdlib-v0.14.2) (2023-06-08) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.14.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.14.0...acvm_stdlib-v0.14.1) (2023-06-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.14.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.3...acvm_stdlib-v0.14.0) (2023-06-06) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.13.3](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.2...acvm_stdlib-v0.13.3) (2023-06-05) - - -### Bug Fixes - -* Empty commit to trigger release-please ([e8f0748](https://github.com/noir-lang/acvm/commit/e8f0748042ef505d59ab63266d3c36c5358ee30d)) - -## [0.13.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.1...acvm_stdlib-v0.13.2) (2023-06-02) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.13.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.13.0...acvm_stdlib-v0.13.1) (2023-06-01) - - -### Bug Fixes - -* **ci:** Correct typo to avoid `undefined` in changelogs ([#333](https://github.com/noir-lang/acvm/issues/333)) ([d3424c0](https://github.com/noir-lang/acvm/commit/d3424c04fd303c9cbe25d03118d8b358cbb84b83)) - -## [0.13.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.12.0...acvm_stdlib-v0.13.0) (2023-06-01) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.12.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.11.0...acvm_stdlib-v0.12.0) (2023-05-17) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## 
[0.11.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.3...acvm_stdlib-v0.11.0) (2023-05-04) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.10.3](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.2...acvm_stdlib-v0.10.3) (2023-04-28) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.10.2](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.1...acvm_stdlib-v0.10.2) (2023-04-28) - - -### Bug Fixes - -* add default flag to `acvm_stdlib` ([#242](https://github.com/noir-lang/acvm/issues/242)) ([83b6fa8](https://github.com/noir-lang/acvm/commit/83b6fa8302569add7e3ac8481b2fd2a6a1ff3576)) - -## [0.10.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.10.0...acvm_stdlib-v0.10.1) (2023-04-28) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - -## [0.10.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.9.0...acvm_stdlib-v0.10.0) (2023-04-26) - - -### ⚠ BREAKING CHANGES - -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) - -### Bug Fixes - -* prevent `bn254` feature flag always being enabled ([#225](https://github.com/noir-lang/acvm/issues/225)) ([82eee6a](https://github.com/noir-lang/acvm/commit/82eee6ab08ae480f04904ca8571fd88f4466c000)) - - -### Miscellaneous Chores - -* organise operator implementations for Expression ([#190](https://github.com/noir-lang/acvm/issues/190)) ([a619df6](https://github.com/noir-lang/acvm/commit/a619df614bbb9b2518b788b42a7553b069823a0f)) - -## [0.9.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.8.1...acvm_stdlib-v0.9.0) (2023-04-07) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.8.1 to 0.9.0 - -## 
[0.8.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.8.0...acvm_stdlib-v0.8.1) (2023-03-30) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.8.0 to 0.8.1 - -## [0.8.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.7.1...acvm_stdlib-v0.8.0) (2023-03-28) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.7.1 to 0.8.0 - -## [0.7.1](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.7.0...acvm_stdlib-v0.7.1) (2023-03-27) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.7.0 to 0.7.1 - -## [0.7.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.6.0...acvm_stdlib-v0.7.0) (2023-03-23) - - -### Miscellaneous Chores - -* **acvm_stdlib:** Synchronize acvm versions - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.6.0 to 0.7.0 - -## [0.6.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.5.0...acvm_stdlib-v0.6.0) (2023-03-03) - - -### ⚠ BREAKING CHANGES - -* **acir:** rename `term_addition` to `push_addition_term` -* **acir:** rename `term_multiplication` to `push_multiplication_term` ([#122](https://github.com/noir-lang/acvm/issues/122)) - -### Miscellaneous Chores - -* **acir:** rename `term_addition` to `push_addition_term` ([d389385](https://github.com/noir-lang/acvm/commit/d38938542851a97dc01727438391e6a65e44c689)) -* **acir:** rename `term_multiplication` to `push_multiplication_term` ([#122](https://github.com/noir-lang/acvm/issues/122)) ([d389385](https://github.com/noir-lang/acvm/commit/d38938542851a97dc01727438391e6a65e44c689)) 
- - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.5.0 to 0.6.0 - -## [0.5.0](https://github.com/noir-lang/acvm/compare/acvm_stdlib-v0.4.1...acvm_stdlib-v0.5.0) (2023-02-22) - - -### ⚠ BREAKING CHANGES - -* refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) - -### Miscellaneous Chores - -* refactor ToRadix to ToRadixLe and ToRadixBe ([#58](https://github.com/noir-lang/acvm/issues/58)) ([2427a27](https://github.com/noir-lang/acvm/commit/2427a275048e598c6d651cce8348a4c55148f235)) - - -### Dependencies - -* The following workspace dependencies were updated - * dependencies - * acir bumped from 0.4.1 to 0.5.0 diff --git a/acvm-repo/stdlib/Cargo.toml b/acvm-repo/stdlib/Cargo.toml deleted file mode 100644 index de50d112df6..00000000000 --- a/acvm-repo/stdlib/Cargo.toml +++ /dev/null @@ -1,22 +0,0 @@ -[package] -name = "acvm_stdlib" -description = "The ACVM standard library." -# x-release-please-start-version -version = "0.37.0" -# x-release-please-end -authors.workspace = true -edition.workspace = true -license.workspace = true -rust-version.workspace = true -repository.workspace = true - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -acir.workspace = true - -[features] -default = ["bn254"] -bn254 = ["acir/bn254"] -bls12_381 = ["acir/bls12_381"] -testing = ["bn254"] diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/blake2s.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/blake2s.rs deleted file mode 100644 index 92bf93d2d56..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/blake2s.rs +++ /dev/null @@ -1,468 +0,0 @@ -//! Blake2s fallback function. 
-use super::{ - utils::{byte_decomposition, round_to_nearest_byte}, - UInt32, -}; -use acir::{ - circuit::Opcode, - native_types::{Expression, Witness}, - FieldElement, -}; -use std::vec; - -const BLAKE2S_BLOCKBYTES_USIZE: usize = 64; -const MSG_SCHEDULE_BLAKE2: [[usize; 16]; 10] = [ - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], - [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3], - [11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4], - [7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8], - [9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13], - [2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9], - [12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11], - [13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10], - [6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5], - [10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0], -]; -const INITIAL_H: [u32; 8] = [ - 0x6b08e647, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, -]; -const IV_VALUE: [u32; 8] = [ - 0x6A09E667, 0xBB67AE85, 0x3C6EF372, 0xA54FF53A, 0x510E527F, 0x9B05688C, 0x1F83D9AB, 0x5BE0CD19, -]; - -pub fn blake2s( - inputs: Vec<(Expression, u32)>, - outputs: Vec, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - - // Decompose the input field elements into bytes and collect the resulting witnesses. 
- for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = create_blake2s_constraint(new_inputs, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - for i in 0..outputs.len() { - let mut expr = Expression::from(outputs[i]); - expr.push_addition_term(-FieldElement::one(), result[i]); - new_opcodes.push(Opcode::Arithmetic(expr)); - } - (num_witness, new_opcodes) -} - -pub(crate) fn create_blake2s_constraint( - input: Vec, - num_witness: u32, -) -> (Vec, u32, Vec) { - let mut new_opcodes = Vec::new(); - - // prepare constants - let (mut blake2s_state, extra_opcodes, num_witness) = Blake2sState::init(num_witness); - new_opcodes.extend(extra_opcodes); - let (blake2s_constants, extra_opcodes, num_witness) = - Blake2sConstantsInCircuit::init(num_witness); - new_opcodes.extend(extra_opcodes); - let (blake2s_iv, extra_opcodes, mut num_witness) = Blake2sIV::init(num_witness); - new_opcodes.extend(extra_opcodes); - - let mut offset = 0; - let mut size = input.len(); - - while size > BLAKE2S_BLOCKBYTES_USIZE { - let (extra_opcodes, updated_witness_counter) = blake2s_increment_counter( - &mut blake2s_state, - &blake2s_constants.blake2s_blockbytes_uint32, - num_witness, - ); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, updated_witness_counter) = blake2s_compress( - &mut blake2s_state, - &blake2s_iv, - input.get(offset..offset + BLAKE2S_BLOCKBYTES_USIZE).unwrap(), - updated_witness_counter, - ); - new_opcodes.extend(extra_opcodes); - offset += BLAKE2S_BLOCKBYTES_USIZE; - size -= BLAKE2S_BLOCKBYTES_USIZE; - num_witness = updated_witness_counter; - } - - let (u32_max, extra_opcodes, mut 
num_witness) = UInt32::load_constant(u32::MAX, num_witness); - new_opcodes.extend(extra_opcodes); - blake2s_state.f[0] = u32_max; - - // pad final block - let mut final_block = input.get(offset..).unwrap().to_vec(); - for _ in 0..BLAKE2S_BLOCKBYTES_USIZE - final_block.len() { - let (pad, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(0_u32, num_witness); - new_opcodes.extend(extra_opcodes); - final_block.push(pad.inner); - num_witness = updated_witness_counter; - } - - let (size_w, extra_opcodes, num_witness) = UInt32::load_constant(size as u32, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - blake2s_increment_counter(&mut blake2s_state, &size_w, num_witness); - new_opcodes.extend(extra_opcodes); - - let (extra_opcodes, num_witness) = - blake2s_compress(&mut blake2s_state, &blake2s_iv, &final_block, num_witness); - new_opcodes.extend(extra_opcodes); - - // decompose the result bytes in u32 to u8 - let (extra_opcodes, mut byte1, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[0].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte2, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[1].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte3, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[2].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte4, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[3].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte5, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[4].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte6, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[5].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut 
byte7, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[6].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut byte8, num_witness) = - byte_decomposition(Expression::from(blake2s_state.h[7].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - - byte1.reverse(); - byte2.reverse(); - byte3.reverse(); - byte4.reverse(); - byte5.reverse(); - byte6.reverse(); - byte7.reverse(); - byte8.reverse(); - - let result = vec![byte1, byte2, byte3, byte4, byte5, byte6, byte7, byte8] - .into_iter() - .flatten() - .collect(); - - (result, num_witness, new_opcodes) -} - -fn blake2s_increment_counter( - state: &mut Blake2sState, - inc: &UInt32, - num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - - // t0 + inc - let (state_t0, extra_opcodes, num_witness) = state.t[0].add(inc, num_witness); - new_opcodes.extend(extra_opcodes); - state.t[0] = state_t0; - - // t1 + (t0 < inc) - let (to_inc, extra_opcodes, num_witness) = state.t[0].less_than_comparison(inc, num_witness); - new_opcodes.extend(extra_opcodes); - let (state_t1, extra_opcodes, num_witness) = state.t[1].add(&to_inc, num_witness); - new_opcodes.extend(extra_opcodes); - state.t[1] = state_t1; - - (new_opcodes, num_witness) -} - -fn blake2s_compress( - state: &mut Blake2sState, - blake2s_iv: &Blake2sIV, - input: &[Witness], - mut num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut m = Vec::new(); - let mut v = Vec::new(); - - for i in 0..16 { - let mut mi_bytes = input.get(i * 4..i * 4 + 4).unwrap().to_vec(); - mi_bytes.reverse(); - let (mi, extra_opcodes, updated_witness_counter) = - UInt32::from_witnesses(&mi_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - m.push(mi[0]); - num_witness = updated_witness_counter; - } - - for i in 0..8 { - v.push(state.h[i]); - } - - v.push(blake2s_iv.iv[0]); - v.push(blake2s_iv.iv[1]); - v.push(blake2s_iv.iv[2]); - v.push(blake2s_iv.iv[3]); - let (v12, 
extra_opcodes, num_witness) = state.t[0].xor(&blake2s_iv.iv[4], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v12); - let (v13, extra_opcodes, num_witness) = state.t[1].xor(&blake2s_iv.iv[5], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v13); - let (v14, extra_opcodes, num_witness) = state.f[0].xor(&blake2s_iv.iv[6], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v14); - let (v15, extra_opcodes, num_witness) = state.f[1].xor(&blake2s_iv.iv[7], num_witness); - new_opcodes.extend(extra_opcodes); - v.push(v15); - - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 0, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 1, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 2, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 3, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 5, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 6, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 7, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = blake2s_round(&mut v, &m, 8, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, mut num_witness) = blake2s_round(&mut v, &m, 9, num_witness); - new_opcodes.extend(extra_opcodes); - - for i in 0..8 { - let (a, extra_opcodes, updated_witness_counter) = state.h[i].xor(&v[i], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_hi, extra_opcodes, updated_witness_counter) = - a.xor(&v[i + 8], updated_witness_counter); - 
new_opcodes.extend(extra_opcodes); - state.h[i] = state_hi; - num_witness = updated_witness_counter; - } - - (new_opcodes, num_witness) -} - -fn blake2s_round( - state: &mut [UInt32], - msg: &[UInt32], - round: usize, - num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - let schedule = &MSG_SCHEDULE_BLAKE2[round]; - - // Mix the columns. - let (extra_opcodes, num_witness) = - g(state, 0, 4, 8, 12, msg[schedule[0]], msg[schedule[1]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 1, 5, 9, 13, msg[schedule[2]], msg[schedule[3]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 2, 6, 10, 14, msg[schedule[4]], msg[schedule[5]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 3, 7, 11, 15, msg[schedule[6]], msg[schedule[7]], num_witness); - new_opcodes.extend(extra_opcodes); - - // Mix the rows. - let (extra_opcodes, num_witness) = - g(state, 0, 5, 10, 15, msg[schedule[8]], msg[schedule[9]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 1, 6, 11, 12, msg[schedule[10]], msg[schedule[11]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 2, 7, 8, 13, msg[schedule[12]], msg[schedule[13]], num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, num_witness) = - g(state, 3, 4, 9, 14, msg[schedule[14]], msg[schedule[15]], num_witness); - new_opcodes.extend(extra_opcodes); - - (new_opcodes, num_witness) -} - -#[allow(clippy::too_many_arguments)] -fn g( - state: &mut [UInt32], - a: usize, - b: usize, - c: usize, - d: usize, - x: UInt32, - y: UInt32, - num_witness: u32, -) -> (Vec, u32) { - let mut new_opcodes = Vec::new(); - - // calculate state[a] as `state[a] + state[b] + x` - let (state_a_1, extra_opcodes, num_witness) = state[a].add(&state[b], num_witness); - 
new_opcodes.extend(extra_opcodes); - let (state_a, extra_opcodes, num_witness) = state_a_1.add(&x, num_witness); - new_opcodes.extend(extra_opcodes); - state[a] = state_a; - - // calculate state[d] as `(state[d] ^ state[a]).ror(16)` - let (state_d_1, extra_opcodes, num_witness) = state[d].xor(&state[a], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_d, extra_opcodes, num_witness) = state_d_1.ror(16, num_witness); - new_opcodes.extend(extra_opcodes); - state[d] = state_d; - - // calculate state[c] as `state[c] + state[d]` - let (state_c, extra_opcodes, num_witness) = state[c].add(&state[d], num_witness); - new_opcodes.extend(extra_opcodes); - state[c] = state_c; - - // caclulate state[b] as `(state[b] ^ state[c]).ror(12)` - let (state_b_1, extra_opcodes, num_witness) = state[b].xor(&state[c], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_b, extra_opcodes, num_witness) = state_b_1.ror(12, num_witness); - new_opcodes.extend(extra_opcodes); - state[b] = state_b; - - // calculate state[a] as `state[a] + state[b] + y` - let (state_a_1, extra_opcodes, num_witness) = state[a].add(&state[b], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_a, extra_opcodes, num_witness) = state_a_1.add(&y, num_witness); - new_opcodes.extend(extra_opcodes); - state[a] = state_a; - - // calculate state[d] as `(state[d] ^ state[a]).ror(8)` - let (state_d_1, extra_opcodes, num_witness) = state[d].xor(&state[a], num_witness); - new_opcodes.extend(extra_opcodes); - let (state_d, extra_opcodes, num_witness) = state_d_1.ror(8, num_witness); - new_opcodes.extend(extra_opcodes); - state[d] = state_d; - - // calculate state[c] as `state[c] + state[d]` - let (state_c, extra_opcodes, num_witness) = state[c].add(&state[d], num_witness); - new_opcodes.extend(extra_opcodes); - state[c] = state_c; - - // caclulate state[b] as `(state[b] ^ state[c]).ror(7)` - let (state_b_1, extra_opcodes, num_witness) = state[b].xor(&state[c], num_witness); - 
new_opcodes.extend(extra_opcodes); - let (state_b, extra_opcodes, num_witness) = state_b_1.ror(7, num_witness); - new_opcodes.extend(extra_opcodes); - state[b] = state_b; - - (new_opcodes, num_witness) -} - -/// Blake2s state `h` `t` and `f` -#[derive(Debug)] -struct Blake2sState { - h: Vec, - t: Vec, - f: Vec, -} - -impl Blake2sState { - fn new(h: Vec, t: Vec, f: Vec) -> Self { - Blake2sState { h, t, f } - } - - /// Initialize internal state of Blake2s - fn init(mut num_witness: u32) -> (Blake2sState, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut h = Vec::new(); - let mut t = Vec::new(); - let mut f = Vec::new(); - - for init_h in INITIAL_H { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(init_h, num_witness); - new_opcodes.extend(extra_opcodes); - h.push(new_witness); - num_witness = updated_witness_counter; - } - - for _ in 0..2 { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(0_u32, num_witness); - new_opcodes.extend(extra_opcodes); - t.push(new_witness); - num_witness = updated_witness_counter; - } - - for _ in 0..2 { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(0_u32, num_witness); - new_opcodes.extend(extra_opcodes); - f.push(new_witness); - num_witness = updated_witness_counter; - } - - let blake2s_state = Blake2sState::new(h, t, f); - - (blake2s_state, new_opcodes, num_witness) - } -} - -/// Blake2s IV (Initialization Vector) -struct Blake2sIV { - iv: Vec, -} - -impl Blake2sIV { - fn new(iv: Vec) -> Self { - Blake2sIV { iv } - } - - /// Initialize IV of Blake2s - fn init(mut num_witness: u32) -> (Blake2sIV, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut iv = Vec::new(); - - for iv_v in IV_VALUE { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(iv_v, num_witness); - new_opcodes.extend(extra_opcodes); - iv.push(new_witness); - num_witness = updated_witness_counter; - } - - let 
blake2s_iv = Blake2sIV::new(iv); - - (blake2s_iv, new_opcodes, num_witness) - } -} - -struct Blake2sConstantsInCircuit { - blake2s_blockbytes_uint32: UInt32, -} - -impl Blake2sConstantsInCircuit { - fn new(blake2s_blockbytes_uint32: UInt32) -> Self { - Blake2sConstantsInCircuit { blake2s_blockbytes_uint32 } - } - - fn init(num_witness: u32) -> (Blake2sConstantsInCircuit, Vec, u32) { - let mut new_opcodes = Vec::new(); - let (blake2s_blockbytes_uint32, extra_opcodes, num_witness) = - UInt32::load_constant(64_u32, num_witness); - new_opcodes.extend(extra_opcodes); - - (Blake2sConstantsInCircuit::new(blake2s_blockbytes_uint32), new_opcodes, num_witness) - } -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/hash_to_field.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/hash_to_field.rs deleted file mode 100644 index 91a7cdd09e4..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/hash_to_field.rs +++ /dev/null @@ -1,168 +0,0 @@ -//! HashToField128Security fallback function. -use super::{ - blake2s::create_blake2s_constraint, - utils::{byte_decomposition, round_to_nearest_byte}, - UInt32, -}; -use crate::helpers::VariableStore; -use acir::{ - brillig::{self, RegisterIndex}, - circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; - -pub fn hash_to_field( - inputs: Vec<(Expression, u32)>, - outputs: Witness, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - - // Decompose the input field elements into bytes and collect the resulting witnesses. 
- for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = create_blake2s_constraint(new_inputs, num_witness); - new_opcodes.extend(extra_opcodes); - - // transform bytes to a single field - let (result, extra_opcodes, num_witness) = field_from_be_bytes(&result, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - let mut expr = Expression::from(outputs); - expr.push_addition_term(-FieldElement::one(), result); - new_opcodes.push(Opcode::Arithmetic(expr)); - (num_witness, new_opcodes) -} - -/// Convert bytes represented by [Witness]es to a single [FieldElement] -fn field_from_be_bytes(result: &[Witness], num_witness: u32) -> (Witness, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // Load `0` and `256` using the load constant function from UInt32 - let (new_witness, extra_opcodes, num_witness) = UInt32::load_constant(0, num_witness); - let mut new_witness = new_witness.inner; - new_opcodes.extend(extra_opcodes); - let (const_256, extra_opcodes, mut num_witness) = UInt32::load_constant(256, num_witness); - let const_256 = const_256.inner; - new_opcodes.extend(extra_opcodes); - - // add byte and multiply 256 each round - for r in result.iter().take(result.len() - 1) { - let (updated_witness, extra_opcodes, updated_witness_counter) = - field_addition(&new_witness, r, num_witness); - new_opcodes.extend(extra_opcodes); - let (updated_witness, extra_opcodes, updated_witness_counter) = - field_mul(&updated_witness, &const_256, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - new_witness = updated_witness; - num_witness = updated_witness_counter; - } - - let (new_witness, 
extra_opcodes, num_witness) = - field_addition(&new_witness, &result[result.len() - 1], num_witness); - new_opcodes.extend(extra_opcodes); - - (new_witness, new_opcodes, num_witness) -} - -/// Caculate and constrain `self` + `rhs` as field -fn field_addition( - lhs: &Witness, - rhs: &Witness, - mut num_witness: u32, -) -> (Witness, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate `self` + `rhs` as field - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *lhs)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *rhs)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryFieldOp { - op: brillig::BinaryFieldOp::Add, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain addition - let mut add_expr = Expression::from(new_witness); - add_expr.push_addition_term(-FieldElement::one(), *lhs); - add_expr.push_addition_term(-FieldElement::one(), *rhs); - new_opcodes.push(Opcode::Arithmetic(add_expr)); - - (new_witness, new_opcodes, num_witness) -} - -/// Calculate and constrain `self` * `rhs` as field -pub(crate) fn field_mul( - lhs: &Witness, - rhs: &Witness, - mut num_witness: u32, -) -> (Witness, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calulate `self` * `rhs` with overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { 
- mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *lhs)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), *rhs)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryFieldOp { - op: brillig::BinaryFieldOp::Mul, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain mul - let mut mul_constraint = Expression::from(new_witness); - mul_constraint.push_multiplication_term(-FieldElement::one(), *lhs, *rhs); - new_opcodes.push(Opcode::Arithmetic(mul_constraint)); - - (new_witness, new_opcodes, num_witness) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/keccak256.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/keccak256.rs deleted file mode 100644 index d91db3dc2c6..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/keccak256.rs +++ /dev/null @@ -1,269 +0,0 @@ -//! Keccak256 fallback function. 
-use super::{ - sha256::pad, - uint8::UInt8, - utils::{byte_decomposition, round_to_nearest_byte}, - UInt64, -}; -use acir::{ - circuit::Opcode, - native_types::{Expression, Witness}, - FieldElement, -}; - -const STATE_NUM_BYTES: usize = 200; -const BITS: usize = 256; -const WORD_SIZE: usize = 8; -const BLOCK_SIZE: usize = (1600 - BITS * 2) / WORD_SIZE; -const ROUND_CONSTANTS: [u64; 24] = [ - 1, - 0x8082, - 0x800000000000808a, - 0x8000000080008000, - 0x808b, - 0x80000001, - 0x8000000080008081, - 0x8000000000008009, - 0x8a, - 0x88, - 0x80008009, - 0x8000000a, - 0x8000808b, - 0x800000000000008b, - 0x8000000000008089, - 0x8000000000008003, - 0x8000000000008002, - 0x8000000000000080, - 0x800a, - 0x800000008000000a, - 0x8000000080008081, - 0x8000000000008080, - 0x80000001, - 0x8000000080008008, -]; -const RHO: [u32; 24] = - [1, 3, 6, 10, 15, 21, 28, 36, 45, 55, 2, 14, 27, 41, 56, 8, 25, 43, 62, 18, 39, 61, 20, 44]; -const PI: [usize; 24] = - [10, 7, 11, 17, 18, 3, 5, 16, 8, 21, 24, 4, 15, 23, 19, 13, 12, 2, 20, 14, 22, 9, 6, 1]; - -pub fn keccak256( - inputs: Vec<(Expression, u32)>, - outputs: Vec, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - - // Decompose the input field elements into bytes and collect the resulting witnesses. 
- for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = create_keccak_constraint(new_inputs, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - for i in 0..outputs.len() { - let mut expr = Expression::from(outputs[i]); - expr.push_addition_term(-FieldElement::one(), result[i]); - new_opcodes.push(Opcode::Arithmetic(expr)); - } - (num_witness, new_opcodes) -} - -fn create_keccak_constraint( - input: Vec, - num_witness: u32, -) -> (Vec, u32, Vec) { - let mut new_opcodes = Vec::new(); - let num_blocks = input.len() / BLOCK_SIZE + 1; - - // pad keccak - let (input, extra_opcodes, mut num_witness) = pad_keccak(input, num_blocks, num_witness); - new_opcodes.extend(extra_opcodes); - - // prepare state - let mut state = Vec::with_capacity(200); - for _ in 0..STATE_NUM_BYTES { - let (zero, extra_opcodes, updated_witness_counter) = UInt8::load_constant(0, num_witness); - new_opcodes.extend(extra_opcodes); - state.push(zero); - num_witness = updated_witness_counter; - } - - // process block - for i in 0..num_blocks { - for j in 0..BLOCK_SIZE { - let (new_state, extra_opcodes, updated_witness_counter) = - state[j].xor(&UInt8::new(input[i * BLOCK_SIZE + j]), num_witness); - new_opcodes.extend(extra_opcodes); - state[j] = new_state; - num_witness = updated_witness_counter; - } - let (new_state, extra_opcodes, updated_witness_counter) = keccakf(state, num_witness); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - state = new_state; - } - - let result: Vec = state[..32].iter().map(|x| x.inner).collect(); - (result, num_witness, new_opcodes) -} - -fn keccakf(state: 
Vec, num_witness: u32) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // turn state into UInt64 - let mut state_witnesses: Vec = Vec::new(); - for i in 0..state.len() / 8 { - for j in 0..8 { - state_witnesses.push(state[i * 8 + (7 - j)].inner); - } - } - let (mut state_u64, extra_opcodes, mut num_witness) = - UInt64::from_witnesses(&state_witnesses, num_witness); - new_opcodes.extend(extra_opcodes); - - // process round - for round_constant in ROUND_CONSTANTS { - let (new_state_u64, extra_opcodes, updated_witness_counter) = - keccak_round(state_u64, round_constant, num_witness); - state_u64 = new_state_u64; - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - } - - // turn state back to UInt8 - let state_u64_witnesses: Vec = state_u64.into_iter().map(|x| x.inner).collect(); - let mut state_u8 = Vec::with_capacity(state_u64_witnesses.len()); - for state_u64_witness in state_u64_witnesses { - let (extra_opcodes, mut u8s, updated_witness_counter) = - byte_decomposition(Expression::from(state_u64_witness), 8, num_witness); - new_opcodes.extend(extra_opcodes); - u8s.reverse(); - state_u8.push(u8s); - num_witness = updated_witness_counter; - } - - let state_u8: Vec = state_u8.into_iter().flatten().map(UInt8::new).collect(); - (state_u8, new_opcodes, num_witness) -} - -fn keccak_round( - mut a: Vec, - round_const: u64, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // theta - let mut array = Vec::with_capacity(5); - for _ in 0..5 { - let (zero, extra_opcodes, updated_witness_counter) = UInt64::load_constant(0, num_witness); - array.push(zero); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - } - for x in 0..5 { - for y_count in 0..5 { - let y = y_count * 5; - let (new_array_ele, extra_opcodes, updated_witness_counter) = - array[x].xor(&a[x + y], num_witness); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - array[x] = 
new_array_ele; - } - } - for x in 0..5 { - for y_count in 0..5 { - let y = y_count * 5; - let (a_ele, extra_opcodes, updated_witness_counter) = - array[(x + 1) % 5].rol(1, num_witness); - new_opcodes.extend(extra_opcodes); - let (b_ele, extra_opcodes, updated_witness_counter) = - array[(x + 4) % 5].xor(&a_ele, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (new_array_ele, extra_opcodes, updated_witness_counter) = - a[x + y].xor(&b_ele, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - a[x + y] = new_array_ele; - } - } - - // rho and pi - let mut last = a[1]; - for x in 0..24 { - array[0] = a[PI[x]]; - let (a_ele, extra_opcodes, updated_witness_counter) = last.rol(RHO[x], num_witness); - new_opcodes.extend(extra_opcodes); - a[PI[x]] = a_ele; - num_witness = updated_witness_counter; - last = array[0]; - } - - // chi - for y_step in 0..5 { - let y = y_step * 5; - - array[..5].copy_from_slice(&a[y..(5 + y)]); - - for x in 0..5 { - let (a_ele, extra_opcodes, updated_witness_counter) = - array[(x + 1) % 5].not(num_witness); - new_opcodes.extend(extra_opcodes); - let (b_ele, extra_opcodes, updated_witness_counter) = - a_ele.and(&array[(x + 2) % 5], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c_ele, extra_opcodes, updated_witness_counter) = - array[x].xor(&b_ele, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - a[y + x] = c_ele; - num_witness = updated_witness_counter; - } - } - - // iota - let (rc, extra_opcodes, num_witness) = UInt64::load_constant(round_const, num_witness); - new_opcodes.extend(extra_opcodes); - let (a_ele, extra_opcodes, num_witness) = a[0].xor(&rc, num_witness); - new_opcodes.extend(extra_opcodes); - a[0] = a_ele; - - (a, new_opcodes, num_witness) -} - -fn pad_keccak( - mut input: Vec, - num_blocks: usize, - num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let total_len = BLOCK_SIZE * num_blocks; - - 
let (mut num_witness, pad_witness, extra_opcodes) = pad(0x01, 8, num_witness); - - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - for _ in 0..total_len - input.len() { - let (updated_witness_counter, pad_witness, extra_opcodes) = pad(0x00, 8, num_witness); - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - num_witness = updated_witness_counter; - } - - let (zero_x_80, extra_opcodes, num_witness) = UInt8::load_constant(0x80, num_witness); - new_opcodes.extend(extra_opcodes); - let (final_pad, extra_opcodes, num_witness) = - UInt8::new(input[total_len - 1]).xor(&zero_x_80, num_witness); - new_opcodes.extend(extra_opcodes); - input[total_len - 1] = final_pad.inner; - - (input, new_opcodes, num_witness) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/logic_fallbacks.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/logic_fallbacks.rs deleted file mode 100644 index fa8c1060a26..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/logic_fallbacks.rs +++ /dev/null @@ -1,127 +0,0 @@ -use crate::{blackbox_fallbacks::utils::mul_with_witness, helpers::VariableStore}; - -use super::utils::{bit_decomposition, boolean_expr}; -use acir::{ - acir_field::FieldElement, - circuit::Opcode, - native_types::{Expression, Witness}, -}; - -// Range constraint -pub fn range(opcode: Expression, bit_size: u32, mut num_witness: u32) -> (u32, Vec) { - if bit_size == 1 { - let mut variables = VariableStore::new(&mut num_witness); - let bit_constraint = Opcode::Arithmetic(boolean_expr(&opcode, &mut variables)); - return (variables.finalize(), vec![bit_constraint]); - } - - let (new_opcodes, _, updated_witness_counter) = - bit_decomposition(opcode, bit_size, num_witness); - (updated_witness_counter, new_opcodes) -} - -/// Returns a set of opcodes which constrain `a & b == result` -/// -/// `a` and `b` are assumed to be constrained to fit within `bit_size` externally. 
-pub fn and( - a: Expression, - b: Expression, - result: Witness, - bit_size: u32, - mut num_witness: u32, -) -> (u32, Vec) { - if bit_size == 1 { - let mut variables = VariableStore::new(&mut num_witness); - - let mut and_expr = mul_with_witness(&a, &b, &mut variables); - and_expr.push_addition_term(-FieldElement::one(), result); - - return (variables.finalize(), vec![Opcode::Arithmetic(and_expr)]); - } - // Decompose the operands into bits - // - let (extra_opcodes_a, a_bits, updated_witness_counter) = - bit_decomposition(a, bit_size, num_witness); - - let (extra_opcodes_b, b_bits, updated_witness_counter) = - bit_decomposition(b, bit_size, updated_witness_counter); - - assert_eq!(a_bits.len(), b_bits.len()); - assert_eq!(a_bits.len(), bit_size as usize); - - let mut two_pow = FieldElement::one(); - let two = FieldElement::from(2_i128); - - // Build an expression that Multiplies each bit element-wise - // This gives the same truth table as the AND operation - // Additionally, we multiply by a power of 2 to build up the - // expected output; ie result = \sum 2^i x_i * y_i - let mut and_expr = Expression::default(); - for (a_bit, b_bit) in a_bits.into_iter().zip(b_bits) { - and_expr.push_multiplication_term(two_pow, a_bit, b_bit); - two_pow = two * two_pow; - } - and_expr.push_addition_term(-FieldElement::one(), result); - - and_expr.sort(); - - let mut new_opcodes = Vec::new(); - new_opcodes.extend(extra_opcodes_a); - new_opcodes.extend(extra_opcodes_b); - new_opcodes.push(Opcode::Arithmetic(and_expr)); - - (updated_witness_counter, new_opcodes) -} - -/// Returns a set of opcodes which constrain `a ^ b == result` -/// -/// `a` and `b` are assumed to be constrained to fit within `bit_size` externally. 
-pub fn xor( - a: Expression, - b: Expression, - result: Witness, - bit_size: u32, - mut num_witness: u32, -) -> (u32, Vec) { - if bit_size == 1 { - let mut variables = VariableStore::new(&mut num_witness); - - let product = mul_with_witness(&a, &b, &mut variables); - let mut xor_expr = &(&a + &b) - &product; - xor_expr.push_addition_term(-FieldElement::one(), result); - - return (variables.finalize(), vec![Opcode::Arithmetic(xor_expr)]); - } - - // Decompose the operands into bits - // - let (extra_opcodes_a, a_bits, updated_witness_counter) = - bit_decomposition(a, bit_size, num_witness); - let (extra_opcodes_b, b_bits, updated_witness_counter) = - bit_decomposition(b, bit_size, updated_witness_counter); - - assert_eq!(a_bits.len(), b_bits.len()); - assert_eq!(a_bits.len(), bit_size as usize); - - let mut two_pow = FieldElement::one(); - let two = FieldElement::from(2_i128); - - // Build an xor expression - // TODO: check this is the correct arithmetization - let mut xor_expr = Expression::default(); - for (a_bit, b_bit) in a_bits.into_iter().zip(b_bits) { - xor_expr.push_addition_term(two_pow, a_bit); - xor_expr.push_addition_term(two_pow, b_bit); - two_pow = two * two_pow; - xor_expr.push_multiplication_term(-two_pow, a_bit, b_bit); - } - xor_expr.push_addition_term(-FieldElement::one(), result); - - xor_expr.sort(); - let mut new_opcodes = Vec::new(); - new_opcodes.extend(extra_opcodes_a); - new_opcodes.extend(extra_opcodes_b); - new_opcodes.push(Opcode::Arithmetic(xor_expr)); - - (updated_witness_counter, new_opcodes) -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/mod.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/mod.rs deleted file mode 100644 index d2ca3c50fa7..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/mod.rs +++ /dev/null @@ -1,19 +0,0 @@ -mod blake2s; -mod hash_to_field; -mod keccak256; -mod logic_fallbacks; -mod sha256; -#[macro_use] -mod uint; -mod uint32; -mod uint64; -mod uint8; -mod utils; -pub use blake2s::blake2s; -pub use 
hash_to_field::hash_to_field; -pub use keccak256::keccak256; -pub use logic_fallbacks::{and, range, xor}; -pub use sha256::sha256; -pub use uint32::UInt32; -pub use uint64::UInt64; -pub use uint8::UInt8; diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/sha256.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/sha256.rs deleted file mode 100644 index 1661b030bcc..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/sha256.rs +++ /dev/null @@ -1,377 +0,0 @@ -//! Sha256 fallback function. -use super::uint32::UInt32; -use super::utils::{byte_decomposition, round_to_nearest_byte}; -use crate::helpers::VariableStore; -use acir::{ - brillig, - circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, - opcodes::{BlackBoxFuncCall, FunctionInput}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; - -const INIT_CONSTANTS: [u32; 8] = [ - 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19, -]; - -const ROUND_CONSTANTS: [u32; 64] = [ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2, -]; - -pub fn sha256( - inputs: Vec<(Expression, u32)>, - outputs: Vec, - mut num_witness: u32, -) -> (u32, Vec) { - let mut new_opcodes = Vec::new(); - let mut new_inputs = Vec::new(); - let mut 
total_num_bytes = 0; - - // Decompose the input field elements into bytes and collect the resulting witnesses. - for (witness, num_bits) in inputs { - let num_bytes = round_to_nearest_byte(num_bits); - total_num_bytes += num_bytes; - let (extra_opcodes, extra_inputs, updated_witness_counter) = - byte_decomposition(witness, num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - new_inputs.extend(extra_inputs); - num_witness = updated_witness_counter; - } - - let (result, num_witness, extra_opcodes) = - create_sha256_constraint(new_inputs, total_num_bytes, num_witness); - new_opcodes.extend(extra_opcodes); - - // constrain the outputs to be the same as the result of the circuit - for i in 0..outputs.len() { - let mut expr = Expression::from(outputs[i]); - expr.push_addition_term(-FieldElement::one(), result[i]); - new_opcodes.push(Opcode::Arithmetic(expr)); - } - (num_witness, new_opcodes) -} - -fn create_sha256_constraint( - mut input: Vec, - total_num_bytes: u32, - num_witness: u32, -) -> (Vec, u32, Vec) { - let mut new_opcodes = Vec::new(); - - // pad the bytes according to sha256 padding rules - let message_bits = total_num_bytes * 8; - let (mut num_witness, pad_witness, extra_opcodes) = pad(128, 8, num_witness); - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - let bytes_per_block = 64; - let num_bytes = (input.len() + 8) as u32; - let num_blocks = num_bytes / bytes_per_block + ((num_bytes % bytes_per_block != 0) as u32); - let num_total_bytes = num_blocks * bytes_per_block; - for _ in num_bytes..num_total_bytes { - let (updated_witness_counter, pad_witness, extra_opcodes) = pad(0, 8, num_witness); - num_witness = updated_witness_counter; - new_opcodes.extend(extra_opcodes); - input.push(pad_witness); - } - let (num_witness, pad_witness, extra_opcodes) = pad(message_bits, 64, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, pad_witness, num_witness) = - byte_decomposition(pad_witness.into(), 8, num_witness); - 
new_opcodes.extend(extra_opcodes); - input.extend(pad_witness); - - // turn witness into u32 and load sha256 state - let (input, extra_opcodes, num_witness) = UInt32::from_witnesses(&input, num_witness); - new_opcodes.extend(extra_opcodes); - let (mut rolling_hash, extra_opcodes, num_witness) = prepare_state_constants(num_witness); - new_opcodes.extend(extra_opcodes); - let (round_constants, extra_opcodes, mut num_witness) = prepare_round_constants(num_witness); - new_opcodes.extend(extra_opcodes); - // split the input into blocks of size 16 - let input: Vec> = input.chunks(16).map(|block| block.to_vec()).collect(); - - // process sha256 blocks - for i in &input { - let (new_rolling_hash, extra_opcodes, updated_witness_counter) = - sha256_block(i, rolling_hash.clone(), round_constants.clone(), num_witness); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - rolling_hash = new_rolling_hash; - } - - // decompose the result bytes in u32 to u8 - let (extra_opcodes, byte1, num_witness) = - byte_decomposition(Expression::from(rolling_hash[0].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte2, num_witness) = - byte_decomposition(Expression::from(rolling_hash[1].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte3, num_witness) = - byte_decomposition(Expression::from(rolling_hash[2].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte4, num_witness) = - byte_decomposition(Expression::from(rolling_hash[3].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte5, num_witness) = - byte_decomposition(Expression::from(rolling_hash[4].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte6, num_witness) = - byte_decomposition(Expression::from(rolling_hash[5].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte7, num_witness) = - 
byte_decomposition(Expression::from(rolling_hash[6].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - let (extra_opcodes, byte8, num_witness) = - byte_decomposition(Expression::from(rolling_hash[7].inner), 4, num_witness); - new_opcodes.extend(extra_opcodes); - - let result = vec![byte1, byte2, byte3, byte4, byte5, byte6, byte7, byte8] - .into_iter() - .flatten() - .collect(); - - (result, num_witness, new_opcodes) -} - -pub(crate) fn pad(number: u32, bit_size: u32, mut num_witness: u32) -> (u32, Witness, Vec) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let pad = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(number as u128), - })], - outputs: vec![BrilligOutputs::Simple(pad)], - bytecode: vec![brillig::Opcode::Stop], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - - let range = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: pad, num_bits: bit_size }, - }); - new_opcodes.push(range); - - (num_witness, pad, new_opcodes) -} - -fn sha256_block( - input: &[UInt32], - rolling_hash: Vec, - round_constants: Vec, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut w = Vec::new(); - w.extend(input.to_owned()); - - for i in 16..64 { - // calculate s0 `w[i - 15].ror(7) ^ w[i - 15].ror(18) ^ (w[i - 15] >> 3)` - let (a1, extra_opcodes, updated_witness_counter) = w[i - 15].ror(7, num_witness); - new_opcodes.extend(extra_opcodes); - let (a2, extra_opcodes, updated_witness_counter) = - w[i - 15].ror(18, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a3, extra_opcodes, updated_witness_counter) = - w[i - 15].rightshift(3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a4, extra_opcodes, updated_witness_counter) = 
a1.xor(&a2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (s0, extra_opcodes, updated_witness_counter) = a4.xor(&a3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate s1 `w[i - 2].ror(17) ^ w[i - 2].ror(19) ^ (w[i - 2] >> 10)` - let (b1, extra_opcodes, updated_witness_counter) = - w[i - 2].ror(17, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b2, extra_opcodes, updated_witness_counter) = - w[i - 2].ror(19, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b3, extra_opcodes, updated_witness_counter) = - w[i - 2].rightshift(10, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b4, extra_opcodes, updated_witness_counter) = b1.xor(&b2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (s1, extra_opcodes, updated_witness_counter) = b4.xor(&b3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate w[i] `w[i - 16] + w[i - 7] + s0 + s1` - let (c1, extra_opcodes, updated_witness_counter) = - w[i - 16].add(&w[i - 7], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c2, extra_opcodes, updated_witness_counter) = c1.add(&s0, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c3, extra_opcodes, updated_witness_counter) = c2.add(&s1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - w.push(c3); - num_witness = updated_witness_counter; - } - - let mut a = rolling_hash[0]; - let mut b = rolling_hash[1]; - let mut c = rolling_hash[2]; - let mut d = rolling_hash[3]; - let mut e = rolling_hash[4]; - let mut f = rolling_hash[5]; - let mut g = rolling_hash[6]; - let mut h = rolling_hash[7]; - - #[allow(non_snake_case)] - for i in 0..64 { - // calculate S1 `e.ror(6) ^ e.ror(11) ^ e.ror(25)` - let (a1, extra_opcodes, updated_witness_counter) = e.ror(6, num_witness); - new_opcodes.extend(extra_opcodes); - let (a2, extra_opcodes, updated_witness_counter) = e.ror(11, 
updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a3, extra_opcodes, updated_witness_counter) = e.ror(25, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (a4, extra_opcodes, updated_witness_counter) = a1.xor(&a2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (S1, extra_opcodes, updated_witness_counter) = a4.xor(&a3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate ch `(e & f) + (~e & g)` - let (b1, extra_opcodes, updated_witness_counter) = e.and(&f, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b2, extra_opcodes, updated_witness_counter) = e.not(updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (b3, extra_opcodes, updated_witness_counter) = b2.and(&g, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (ch, extra_opcodes, updated_witness_counter) = b1.add(&b3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // caculate temp1 `h + S1 + ch + round_constants[i] + w[i]` - let (c1, extra_opcodes, updated_witness_counter) = h.add(&S1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c2, extra_opcodes, updated_witness_counter) = c1.add(&ch, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (c3, extra_opcodes, updated_witness_counter) = - c2.add(&round_constants[i], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (temp1, extra_opcodes, updated_witness_counter) = - c3.add(&w[i], updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate S0 `a.ror(2) ^ a.ror(13) ^ a.ror(22)` - let (d1, extra_opcodes, updated_witness_counter) = a.ror(2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (d2, extra_opcodes, updated_witness_counter) = a.ror(13, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (d3, extra_opcodes, updated_witness_counter) = a.ror(22, updated_witness_counter); - 
new_opcodes.extend(extra_opcodes); - let (d4, extra_opcodes, updated_witness_counter) = d1.xor(&d2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (S0, extra_opcodes, updated_witness_counter) = d4.xor(&d3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate T0 `b & c` - let (T0, extra_opcodes, updated_witness_counter) = b.and(&c, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate maj `(a & (b + c - (T0 + T0))) + T0` which is the same as `(a & b) ^ (a & c) ^ (b & c)` - let (e1, extra_opcodes, updated_witness_counter) = T0.add(&T0, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (e2, extra_opcodes, updated_witness_counter) = c.sub(&e1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (e3, extra_opcodes, updated_witness_counter) = b.add(&e2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (e4, extra_opcodes, updated_witness_counter) = a.and(&e3, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - let (maj, extra_opcodes, updated_witness_counter) = e4.add(&T0, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - // calculate temp2 `S0 + maj` - let (temp2, extra_opcodes, updated_witness_counter) = S0.add(&maj, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - - h = g; - g = f; - f = e; - let (new_e, extra_opcodes, updated_witness_counter) = - d.add(&temp1, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - d = c; - c = b; - b = a; - let (new_a, extra_opcodes, updated_witness_counter) = - temp1.add(&temp2, updated_witness_counter); - new_opcodes.extend(extra_opcodes); - num_witness = updated_witness_counter; - a = new_a; - e = new_e; - } - - let mut output = Vec::new(); - let (output0, extra_opcodes, num_witness) = a.add(&rolling_hash[0], num_witness); - new_opcodes.extend(extra_opcodes); - let (output1, extra_opcodes, num_witness) = b.add(&rolling_hash[1], num_witness); - 
new_opcodes.extend(extra_opcodes); - let (output2, extra_opcodes, num_witness) = c.add(&rolling_hash[2], num_witness); - new_opcodes.extend(extra_opcodes); - let (output3, extra_opcodes, num_witness) = d.add(&rolling_hash[3], num_witness); - new_opcodes.extend(extra_opcodes); - let (output4, extra_opcodes, num_witness) = e.add(&rolling_hash[4], num_witness); - new_opcodes.extend(extra_opcodes); - let (output5, extra_opcodes, num_witness) = f.add(&rolling_hash[5], num_witness); - new_opcodes.extend(extra_opcodes); - let (output6, extra_opcodes, num_witness) = g.add(&rolling_hash[6], num_witness); - new_opcodes.extend(extra_opcodes); - let (output7, extra_opcodes, num_witness) = h.add(&rolling_hash[7], num_witness); - new_opcodes.extend(extra_opcodes); - - output.push(output0); - output.push(output1); - output.push(output2); - output.push(output3); - output.push(output4); - output.push(output5); - output.push(output6); - output.push(output7); - - (output, new_opcodes, num_witness) -} - -/// Load initial state constants of Sha256 -pub(crate) fn prepare_state_constants(mut num_witness: u32) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut new_witnesses = Vec::new(); - - for i in INIT_CONSTANTS { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(i, num_witness); - new_opcodes.extend(extra_opcodes); - new_witnesses.push(new_witness); - num_witness = updated_witness_counter; - } - - (new_witnesses, new_opcodes, num_witness) -} - -/// Load round constants of Sha256 -pub(crate) fn prepare_round_constants(mut num_witness: u32) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut new_witnesses = Vec::new(); - - for i in ROUND_CONSTANTS { - let (new_witness, extra_opcodes, updated_witness_counter) = - UInt32::load_constant(i, num_witness); - new_opcodes.extend(extra_opcodes); - new_witnesses.push(new_witness); - num_witness = updated_witness_counter; - } - - (new_witnesses, new_opcodes, num_witness) -} diff 
--git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint.rs deleted file mode 100644 index 6f4039835f7..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint.rs +++ /dev/null @@ -1,648 +0,0 @@ -#[macro_export] -macro_rules! impl_uint { - ( - $name:ident, - $type:ty, - $size:expr - ) => { - use acir::{ - brillig::{self, RegisterIndex}, - circuit::{ - brillig::{Brillig, BrilligInputs, BrilligOutputs}, - directives::QuotientDirective, - opcodes::{BlackBoxFuncCall, FunctionInput}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, - }; - use $crate::helpers::VariableStore; - - /// UInt contains a witness that points to a field element that represents a u32 integer - /// It has a inner field of type [Witness] that points to the field element and width = 32 - #[derive(Copy, Clone, Debug)] - pub struct $name { - pub(crate) inner: Witness, - width: u32, - } - - impl $name { - #[cfg(any(test, feature = "testing"))] - pub fn get_inner(&self) -> Witness { - self.inner - } - } - - impl $name { - /// Initialize A new [UInt] type with a [Witness] - pub fn new(witness: Witness) -> Self { - $name { inner: witness, width: $size } - } - - /// Get u(n) + 1 - pub(crate) fn get_max_plus_one( - &self, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(2_u128.pow(self.width)), - })], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::Stop], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Load a constant into the circuit - pub(crate) fn 
load_constant( - constant: $type, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(constant as u128), - })], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::Stop], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Returns the quotient and remainder such that lhs = rhs * quotient + remainder - // This should be the same as its equivalent in the Noir repo - pub fn euclidean_division( - lhs: &$name, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, $name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let q_witness = variables.new_variable(); - let r_witness = variables.new_variable(); - - // compute quotient using directive function - let quotient_opcode = Opcode::Directive( - acir::circuit::directives::Directive::Quotient(QuotientDirective { - a: lhs.inner.into(), - b: rhs.inner.into(), - q: q_witness, - r: r_witness, - predicate: None, - }), - ); - new_opcodes.push(quotient_opcode); - - // make sure r and q are in 32 bit range - let r_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: r_witness, num_bits: lhs.width }, - }); - let q_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: q_witness, num_bits: lhs.width }, - }); - new_opcodes.push(r_range_opcode); - new_opcodes.push(q_range_opcode); - let num_witness = variables.finalize(); - - // constrain r < rhs - let (rhs_sub_r, extra_opcodes, num_witness) = - 
rhs.sub_no_overflow(&$name::new(r_witness), num_witness); - new_opcodes.extend(extra_opcodes); - let rhs_sub_r_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: rhs_sub_r.inner, num_bits: lhs.width }, - }); - new_opcodes.push(rhs_sub_r_range_opcode); - - // constrain lhs = rhs * quotient + remainder - let rhs_expr = Expression::from(rhs.inner); - let lhs_constraint = Expression::from(lhs.inner); - let rhs_constraint = &rhs_expr * &Expression::from(q_witness); - let rhs_constraint = &rhs_constraint.unwrap() + &Expression::from(r_witness); - let div_euclidean = &lhs_constraint - &rhs_constraint; - new_opcodes.push(Opcode::Arithmetic(div_euclidean)); - - ($name::new(q_witness), $name::new(r_witness), new_opcodes, num_witness) - } - - /// Rotate left `rotation` bits. `(x << rotation) | (x >> (width - rotation))` - // This should be the same as `u32.rotate_left(rotation)` in rust stdlib - pub fn rol(&self, rotation: u32, num_witness: u32) -> ($name, Vec, u32) { - let rotation = rotation % self.width; - let mut new_opcodes = Vec::new(); - let (right_shift, extra_opcodes, num_witness) = - self.rightshift(self.width - rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (left_shift, extra_opcodes, num_witness) = - self.leftshift(rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (result, extra_opcodes, num_witness) = left_shift.or(&right_shift, num_witness); - new_opcodes.extend(extra_opcodes); - - (result, new_opcodes, num_witness) - } - - /// Rotate right `rotation` bits. 
`(x >> rotation) | (x << (width - rotation))` - // This should be the same as `u32.rotate_right(rotation)` in rust stdlib - pub fn ror(&self, rotation: u32, num_witness: u32) -> ($name, Vec, u32) { - let rotation = rotation % self.width; - let mut new_opcodes = Vec::new(); - let (left_shift, extra_opcodes, num_witness) = - self.leftshift(self.width - rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (right_shift, extra_opcodes, num_witness) = - self.rightshift(rotation, num_witness); - new_opcodes.extend(extra_opcodes); - let (result, extra_opcodes, num_witness) = left_shift.or(&right_shift, num_witness); - new_opcodes.extend(extra_opcodes); - - (result, new_opcodes, num_witness) - } - - /// left shift by `bits` - pub fn leftshift(&self, bits: u32, num_witness: u32) -> ($name, Vec, u32) { - let bits = bits % self.width; - let mut new_opcodes = Vec::new(); - let two: $type = 2; - let (two_pow_rhs, extra_opcodes, num_witness) = - $name::load_constant(two.pow(bits), num_witness); - new_opcodes.extend(extra_opcodes); - let (left_shift, extra_opcodes, num_witness) = self.mul(&two_pow_rhs, num_witness); - new_opcodes.extend(extra_opcodes); - - (left_shift, new_opcodes, num_witness) - } - - /// right shift by `bits` - pub fn rightshift(&self, bits: u32, num_witness: u32) -> ($name, Vec, u32) { - let bits = bits % self.width; - let mut new_opcodes = Vec::new(); - let two: $type = 2; - let (two_pow_rhs, extra_opcodes, num_witness) = - $name::load_constant(two.pow(bits), num_witness); - new_opcodes.extend(extra_opcodes); - let (right_shift, _, extra_opcodes, num_witness) = - $name::euclidean_division(self, &two_pow_rhs, num_witness); - new_opcodes.extend(extra_opcodes); - - (right_shift, new_opcodes, num_witness) - } - - /// Caculate and constrain `self` + `rhs` - pub fn add(&self, rhs: &$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = 
variables.new_variable(); - - // calculate `self` + `rhs` with overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Add, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain addition - let mut add_expr = Expression::from(new_witness); - add_expr.push_addition_term(-FieldElement::one(), self.inner); - add_expr.push_addition_term(-FieldElement::one(), rhs.inner); - new_opcodes.push(Opcode::Arithmetic(add_expr)); - - // mod 2^width to get final result as the remainder - let (two_pow_width, extra_opcodes, num_witness) = - self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - let (_, add_mod, extra_opcodes, num_witness) = $name::euclidean_division( - &$name::new(new_witness), - &two_pow_width, - num_witness, - ); - new_opcodes.extend(extra_opcodes); - - (add_mod, new_opcodes, num_witness) - } - - /// Caculate and constrain `self` - `rhs` - pub fn sub(&self, rhs: &$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate 2^32 + self - rhs to avoid overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: 
FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(1_u128 << self.width), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![ - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Add, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(2), - destination: RegisterIndex::from(0), - }, - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }, - ], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain subtraction - let mut sub_constraint = Expression::from(self.inner); - sub_constraint.push_addition_term(-FieldElement::one(), new_witness); - sub_constraint.push_addition_term(-FieldElement::one(), rhs.inner); - sub_constraint.q_c = FieldElement::from(1_u128 << self.width); - new_opcodes.push(Opcode::Arithmetic(sub_constraint)); - - // mod 2^width to get final result as the remainder - let (two_pow_width, extra_opcodes, num_witness) = - self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - let (_, sub_mod, extra_opcodes, num_witness) = $name::euclidean_division( - &$name::new(new_witness), - &two_pow_width, - num_witness, - ); - new_opcodes.extend(extra_opcodes); - - (sub_mod, new_opcodes, num_witness) - } - - /// Calculate and constrain `self` - `rhs` - 1 without allowing overflow - /// This is a helper function to `euclidean_division` - // There is a `-1` because theres a case where rhs = 2^32 and remainder = 0 - pub(crate) fn sub_no_overflow( - &self, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = 
Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calculate self - rhs - 1 - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::one(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![ - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }, - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(2), - destination: RegisterIndex::from(0), - }, - ], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain subtraction - let mut sub_constraint = Expression::from(self.inner); - sub_constraint.push_addition_term(-FieldElement::one(), new_witness); - sub_constraint.push_addition_term(-FieldElement::one(), rhs.inner); - sub_constraint.q_c = -FieldElement::one(); - new_opcodes.push(Opcode::Arithmetic(sub_constraint)); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` * `rhs` - pub(crate) fn mul( - &self, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - // calulate `self` * `rhs` with overflow - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - 
BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryFieldOp { - op: brillig::BinaryFieldOp::Mul, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - // constrain mul - let mut mul_constraint = Expression::from(new_witness); - mul_constraint.push_multiplication_term( - -FieldElement::one(), - self.inner, - rhs.inner, - ); - new_opcodes.push(Opcode::Arithmetic(mul_constraint)); - - // mod 2^width to get final result as the remainder - let (two_pow_rhs, extra_opcodes, num_witness) = self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - let (_, mul_mod, extra_opcodes, num_witness) = - $name::euclidean_division(&$name::new(new_witness), &two_pow_rhs, num_witness); - new_opcodes.extend(extra_opcodes); - - (mul_mod, new_opcodes, num_witness) - } - - /// Calculate and constrain `self` and `rhs` - pub fn and(&self, rhs: &$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - let num_witness = variables.finalize(); - let and_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::AND { - lhs: FunctionInput { witness: self.inner, num_bits: self.width }, - rhs: FunctionInput { witness: rhs.inner, num_bits: self.width }, - output: new_witness, - }); - new_opcodes.push(and_opcode); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` xor `rhs` - pub fn xor(&self, rhs: 
&$name, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - let num_witness = variables.finalize(); - let xor_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::XOR { - lhs: FunctionInput { witness: self.inner, num_bits: self.width }, - rhs: FunctionInput { witness: rhs.inner, num_bits: self.width }, - output: new_witness, - }); - new_opcodes.push(xor_opcode); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` or `rhs` - pub fn or(&self, rhs: &$name, num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - - // a | b = (a & b) + (a ^ b) - let (a_and_b, extra_opcodes, num_witness) = self.and(rhs, num_witness); - new_opcodes.extend(extra_opcodes); - let (a_xor_b, extra_opcodes, num_witness) = self.xor(rhs, num_witness); - new_opcodes.extend(extra_opcodes); - let (or, extra_opcodes, num_witness) = a_and_b.add(&a_xor_b, num_witness); - new_opcodes.extend(extra_opcodes); - - (or, new_opcodes, num_witness) - } - - /// Calculate and constrain not `self` - pub(crate) fn not(&self, mut num_witness: u32) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from((1_u128 << self.width) - 1), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: self.width, - lhs: RegisterIndex::from(1), - rhs: RegisterIndex::from(0), - destination: 
RegisterIndex::from(0), - }], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let num_witness = variables.finalize(); - - let mut not_constraint = Expression::from(new_witness); - not_constraint.push_addition_term(FieldElement::one(), self.inner); - not_constraint.q_c = -FieldElement::from((1_u128 << self.width) - 1); - new_opcodes.push(Opcode::Arithmetic(not_constraint)); - - ($name::new(new_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` >= `rhs` - // This should be similar to its equivalent in the Noir repo - pub(crate) fn more_than_eq_comparison( - &self, - rhs: &$name, - mut num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let new_witness = variables.new_variable(); - let q_witness = variables.new_variable(); - let r_witness = variables.new_variable(); - - // calculate 2^32 + self - rhs - let brillig_opcode = Opcode::Brillig(Brillig { - inputs: vec![ - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), self.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![(FieldElement::one(), rhs.inner)], - q_c: FieldElement::zero(), - }), - BrilligInputs::Single(Expression { - mul_terms: vec![], - linear_combinations: vec![], - q_c: FieldElement::from(1_u128 << self.width), - }), - ], - outputs: vec![BrilligOutputs::Simple(new_witness)], - bytecode: vec![ - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Add, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(2), - destination: RegisterIndex::from(0), - }, - brillig::Opcode::BinaryIntOp { - op: brillig::BinaryIntOp::Sub, - bit_size: 127, - lhs: RegisterIndex::from(0), - rhs: RegisterIndex::from(1), - destination: RegisterIndex::from(0), - }, - ], - predicate: None, - }); - new_opcodes.push(brillig_opcode); - let 
num_witness = variables.finalize(); - - // constrain subtraction - let mut sub_constraint = Expression::from(self.inner); - sub_constraint.push_addition_term(-FieldElement::one(), new_witness); - sub_constraint.push_addition_term(-FieldElement::one(), rhs.inner); - sub_constraint.q_c = FieldElement::from(1_u128 << self.width); - new_opcodes.push(Opcode::Arithmetic(sub_constraint)); - - let (two_pow_rhs, extra_opcodes, num_witness) = self.get_max_plus_one(num_witness); - new_opcodes.extend(extra_opcodes); - - // constraint 2^{max_bits} + a - b = q * 2^{max_bits} + r - // q = 1 if a == b - // q = 1 if a > b - // q = 0 if a < b - let quotient_opcode = Opcode::Directive( - acir::circuit::directives::Directive::Quotient(QuotientDirective { - a: new_witness.into(), - b: two_pow_rhs.inner.into(), - q: q_witness, - r: r_witness, - predicate: None, - }), - ); - new_opcodes.push(quotient_opcode); - - // make sure r in 32 bit range and q is 1 bit - let r_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: r_witness, num_bits: self.width }, - }); - let q_range_opcode = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: q_witness, num_bits: 1 }, - }); - new_opcodes.push(r_range_opcode); - new_opcodes.push(q_range_opcode); - - ($name::new(q_witness), new_opcodes, num_witness) - } - - /// Calculate and constrain `self` < `rhs` - pub fn less_than_comparison( - &self, - rhs: &$name, - num_witness: u32, - ) -> ($name, Vec, u32) { - let mut new_opcodes = Vec::new(); - let (mut comparison, extra_opcodes, num_witness) = - self.more_than_eq_comparison(rhs, num_witness); - new_opcodes.extend(extra_opcodes); - comparison.width = 1; - - // `self` < `rhs` == not `self` >= `rhs` - let (less_than, extra_opcodes, num_witness) = comparison.not(num_witness); - new_opcodes.extend(extra_opcodes); - - (less_than, new_opcodes, num_witness) - } - } - }; -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint32.rs 
b/acvm-repo/stdlib/src/blackbox_fallbacks/uint32.rs deleted file mode 100644 index 58314d6ba4c..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint32.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::impl_uint; - -impl_uint!(UInt32, u32, 32); -impl UInt32 { - /// Load a [UInt32] from four [Witness]es each representing a [u8] - pub(crate) fn from_witnesses( - witnesses: &[Witness], - mut num_witness: u32, - ) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let mut uint = Vec::new(); - - for i in 0..witnesses.len() / 4 { - let new_witness = variables.new_variable(); - uint.push(UInt32::new(new_witness)); - let mut expr = Expression::from(new_witness); - for j in 0..4 { - let scaling_factor_value = 1 << (8 * (3 - j) as u32); - let scaling_factor = FieldElement::from(scaling_factor_value as u128); - expr.push_addition_term(-scaling_factor, witnesses[i * 4 + j]); - } - - new_opcodes.push(Opcode::Arithmetic(expr)); - } - let num_witness = variables.finalize(); - - (uint, new_opcodes, num_witness) - } -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint64.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint64.rs deleted file mode 100644 index cddb23275cb..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint64.rs +++ /dev/null @@ -1,30 +0,0 @@ -use crate::impl_uint; - -impl_uint!(UInt64, u64, 64); -impl UInt64 { - /// Load a [UInt64] from eight [Witness]es each representing a [u8] - pub(crate) fn from_witnesses( - witnesses: &[Witness], - mut num_witness: u32, - ) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - let mut uint = Vec::new(); - - for i in 0..witnesses.len() / 8 { - let new_witness = variables.new_variable(); - uint.push(UInt64::new(new_witness)); - let mut expr = Expression::from(new_witness); - for j in 0..8 { - let scaling_factor_value: u128 = 1 << (8 * (7 - j) as u32); - let scaling_factor = 
FieldElement::from(scaling_factor_value); - expr.push_addition_term(-scaling_factor, witnesses[i * 8 + j]); - } - - new_opcodes.push(Opcode::Arithmetic(expr)); - } - let num_witness = variables.finalize(); - - (uint, new_opcodes, num_witness) - } -} diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/uint8.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/uint8.rs deleted file mode 100644 index 2ffc2cae1be..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/uint8.rs +++ /dev/null @@ -1,2 +0,0 @@ -use crate::impl_uint; -impl_uint!(UInt8, u8, 8); diff --git a/acvm-repo/stdlib/src/blackbox_fallbacks/utils.rs b/acvm-repo/stdlib/src/blackbox_fallbacks/utils.rs deleted file mode 100644 index 4921c71c9fe..00000000000 --- a/acvm-repo/stdlib/src/blackbox_fallbacks/utils.rs +++ /dev/null @@ -1,175 +0,0 @@ -use crate::helpers::VariableStore; -use acir::{ - circuit::{ - directives::Directive, - opcodes::{BlackBoxFuncCall, FunctionInput}, - Opcode, - }, - native_types::{Expression, Witness}, - FieldElement, -}; - -fn round_to_nearest_mul_8(num_bits: u32) -> u32 { - let remainder = num_bits % 8; - - if remainder == 0 { - return num_bits; - } - - num_bits + 8 - remainder -} - -pub(crate) fn round_to_nearest_byte(num_bits: u32) -> u32 { - round_to_nearest_mul_8(num_bits) / 8 -} - -pub(crate) fn boolean_expr(expr: &Expression, variables: &mut VariableStore) -> Expression { - &mul_with_witness(expr, expr, variables) - expr -} - -/// Returns an expression which represents `lhs * rhs` -/// -/// If one has multiplicative term and the other is of degree one or more, -/// the function creates [intermediate variables][`Witness`] accordingly. -/// There are two cases where we can optimize the multiplication between two expressions: -/// 1. If both expressions have at most a total degree of 1 in each term, then we can just multiply them -/// as each term in the result will be degree-2. -/// 2. 
If one expression is a constant, then we can just multiply the constant with the other expression -/// -/// (1) is because an [`Expression`] can hold at most a degree-2 univariate polynomial -/// which is what you get when you multiply two degree-1 univariate polynomials. -pub(crate) fn mul_with_witness( - lhs: &Expression, - rhs: &Expression, - variables: &mut VariableStore, -) -> Expression { - use std::borrow::Cow; - let lhs_is_linear = lhs.is_linear(); - let rhs_is_linear = rhs.is_linear(); - - // Case 1: Both expressions have at most a total degree of 1 in each term - if lhs_is_linear && rhs_is_linear { - return (lhs * rhs) - .expect("one of the expressions is a constant and so this should not fail"); - } - - // Case 2: One or both of the sides needs to be reduced to a degree-1 univariate polynomial - let lhs_reduced = if lhs_is_linear { - Cow::Borrowed(lhs) - } else { - Cow::Owned(variables.new_variable().into()) - }; - - // If the lhs and rhs are the same, then we do not need to reduce - // rhs, we only need to square the lhs. - if lhs == rhs { - return (&*lhs_reduced * &*lhs_reduced) - .expect("Both expressions are reduced to be degree<=1"); - }; - - let rhs_reduced = if rhs_is_linear { - Cow::Borrowed(rhs) - } else { - Cow::Owned(variables.new_variable().into()) - }; - - (&*lhs_reduced * &*rhs_reduced).expect("Both expressions are reduced to be degree<=1") -} - -// Generates opcodes and directives to bit decompose the input `opcode` -// Returns the bits and the updated witness counter -// TODO:Ideally, we return the updated witness counter, or we require the input -// TODO to be a VariableStore. 
We are not doing this because we want migration to -// TODO be less painful -pub(crate) fn bit_decomposition( - opcode: Expression, - bit_size: u32, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - - // First create a witness for each bit - let mut bit_vector = Vec::with_capacity(bit_size as usize); - for _ in 0..bit_size { - bit_vector.push(variables.new_variable()); - } - - // Next create a directive which computes those bits. - new_opcodes.push(Opcode::Directive(Directive::ToLeRadix { - a: opcode.clone(), - b: bit_vector.clone(), - radix: 2, - })); - - // Now apply constraints to the bits such that they are the bit decomposition - // of the input and each bit is actually a bit - let mut binary_exprs = Vec::new(); - let mut bit_decomp_constraint = opcode; - let mut two_pow: FieldElement = FieldElement::one(); - let two = FieldElement::from(2_i128); - for &bit in &bit_vector { - // Bit constraint to ensure each bit is a zero or one; bit^2 - bit = 0 - let expr = boolean_expr(&bit.into(), &mut variables); - binary_exprs.push(Opcode::Arithmetic(expr)); - - // Constraint to ensure that the bits are constrained to be a bit decomposition - // of the input - // ie \sum 2^i * x_i = input - bit_decomp_constraint.push_addition_term(-two_pow, bit); - two_pow = two * two_pow; - } - - new_opcodes.extend(binary_exprs); - bit_decomp_constraint.sort(); // TODO: we have an issue open to check if this is needed. Ideally, we remove it. 
- new_opcodes.push(Opcode::Arithmetic(bit_decomp_constraint)); - - (new_opcodes, bit_vector, variables.finalize()) -} - -// TODO: Maybe this can be merged with `bit_decomposition` -pub(crate) fn byte_decomposition( - opcode: Expression, - num_bytes: u32, - mut num_witness: u32, -) -> (Vec, Vec, u32) { - let mut new_opcodes = Vec::new(); - let mut variables = VariableStore::new(&mut num_witness); - - // First create a witness for each byte - let mut vector = Vec::with_capacity(num_bytes as usize); - for _ in 0..num_bytes { - vector.push(variables.new_variable()); - } - - // Next create a directive which computes those byte. - new_opcodes.push(Opcode::Directive(Directive::ToLeRadix { - a: opcode.clone(), - b: vector.clone(), - radix: 256, - })); - vector.reverse(); - - // Now apply constraints to the bytes such that they are the byte decomposition - // of the input and each byte is actually a byte - let mut byte_exprs = Vec::new(); - let mut decomp_constraint = opcode; - let byte_shift: u128 = 256; - for (i, v) in vector.iter().enumerate() { - let range = Opcode::BlackBoxFuncCall(BlackBoxFuncCall::RANGE { - input: FunctionInput { witness: *v, num_bits: 8 }, - }); - let scaling_factor_value = byte_shift.pow(num_bytes - 1 - i as u32); - let scaling_factor = FieldElement::from(scaling_factor_value); - - decomp_constraint.push_addition_term(-scaling_factor, *v); - - byte_exprs.push(range); - } - - new_opcodes.extend(byte_exprs); - decomp_constraint.sort(); - new_opcodes.push(Opcode::Arithmetic(decomp_constraint)); - - (new_opcodes, vector, variables.finalize()) -} diff --git a/acvm-repo/stdlib/src/helpers.rs b/acvm-repo/stdlib/src/helpers.rs deleted file mode 100644 index 5ab258368f4..00000000000 --- a/acvm-repo/stdlib/src/helpers.rs +++ /dev/null @@ -1,23 +0,0 @@ -use acir::native_types::Witness; - -// Simple helper struct to keep track of the current witness index -// and create variables -pub struct VariableStore<'a> { - witness_index: &'a mut u32, -} - -impl<'a> 
VariableStore<'a> { - pub fn new(witness_index: &'a mut u32) -> Self { - Self { witness_index } - } - - pub fn new_variable(&mut self) -> Witness { - let witness = Witness(*self.witness_index); - *self.witness_index += 1; - witness - } - - pub fn finalize(self) -> u32 { - *self.witness_index - } -} diff --git a/acvm-repo/stdlib/src/lib.rs b/acvm-repo/stdlib/src/lib.rs deleted file mode 100644 index 9aecde631fb..00000000000 --- a/acvm-repo/stdlib/src/lib.rs +++ /dev/null @@ -1,7 +0,0 @@ -#![forbid(unsafe_code)] -#![warn(unreachable_pub)] -#![warn(clippy::semicolon_if_nothing_returned)] -#![cfg_attr(not(test), warn(unused_crate_dependencies, unused_extern_crates))] - -pub mod blackbox_fallbacks; -pub mod helpers; diff --git a/aztec_macros/src/lib.rs b/aztec_macros/src/lib.rs index 1b433c33df3..c9adece4eb5 100644 --- a/aztec_macros/src/lib.rs +++ b/aztec_macros/src/lib.rs @@ -31,25 +31,46 @@ impl MacroProcessor for AztecMacro { } } +const FUNCTION_TREE_HEIGHT: u32 = 5; +const MAX_CONTRACT_FUNCTIONS: usize = 2_usize.pow(FUNCTION_TREE_HEIGHT); + #[derive(Debug, Clone)] pub enum AztecMacroError { - AztecNotFound, - AztecComputeNoteHashAndNullifierNotFound { span: Span }, + AztecDepNotFound, + ComputeNoteHashAndNullifierNotFound { span: Span }, + ContractHasTooManyFunctions { span: Span }, + ContractConstructorMissing { span: Span }, + UnsupportedFunctionArgumentType { span: Span, typ: UnresolvedTypeData }, } impl From for MacroError { fn from(err: AztecMacroError) -> Self { match err { - AztecMacroError::AztecNotFound {} => MacroError { + AztecMacroError::AztecDepNotFound {} => MacroError { primary_message: "Aztec dependency not found. Please add aztec as a dependency in your Cargo.toml. 
For more information go to https://docs.aztec.network/dev_docs/debugging/aztecnr-errors#aztec-dependency-not-found-please-add-aztec-as-a-dependency-in-your-nargotoml".to_owned(), secondary_message: None, span: None, }, - AztecMacroError::AztecComputeNoteHashAndNullifierNotFound { span } => MacroError { + AztecMacroError::ComputeNoteHashAndNullifierNotFound { span } => MacroError { primary_message: "compute_note_hash_and_nullifier function not found. Define it in your contract. For more information go to https://docs.aztec.network/dev_docs/debugging/aztecnr-errors#compute_note_hash_and_nullifier-function-not-found-define-it-in-your-contract".to_owned(), secondary_message: None, span: Some(span), }, + AztecMacroError::ContractHasTooManyFunctions { span } => MacroError { + primary_message: format!("Contract can only have a maximum of {} functions", MAX_CONTRACT_FUNCTIONS), + secondary_message: None, + span: Some(span), + }, + AztecMacroError::ContractConstructorMissing { span } => MacroError { + primary_message: "Contract must have a constructor function".to_owned(), + secondary_message: None, + span: Some(span), + }, + AztecMacroError::UnsupportedFunctionArgumentType { span, typ } => MacroError { + primary_message: format!("Provided parameter type `{typ:?}` is not supported in Aztec contract interface"), + secondary_message: None, + span: Some(span), + }, } } } @@ -201,7 +222,9 @@ fn transform( // Covers all functions in the ast for submodule in ast.submodules.iter_mut().filter(|submodule| submodule.is_contract) { - if transform_module(&mut submodule.contents, crate_id, context)? { + if transform_module(&mut submodule.contents, crate_id, context) + .map_err(|(err, file_id)| (err.into(), file_id))? 
+ { check_for_aztec_dependency(crate_id, context)?; include_relevant_imports(&mut submodule.contents); } @@ -243,7 +266,7 @@ fn check_for_aztec_dependency( if has_aztec_dependency { Ok(()) } else { - Err((AztecMacroError::AztecNotFound.into(), crate_graph.root_file_id)) + Err((AztecMacroError::AztecDepNotFound.into(), crate_graph.root_file_id)) } } @@ -252,12 +275,15 @@ fn check_for_storage_definition(module: &SortedModule) -> bool { module.types.iter().any(|r#struct| r#struct.name.0.contents == "Storage") } -// Check if "compute_note_hash_and_nullifier(Field,Field,Field,[Field; N]) -> [Field; 4]" is defined +// Check if "compute_note_hash_and_nullifier(AztecAddress,Field,Field,[Field; N]) -> [Field; 4]" is defined fn check_for_compute_note_hash_and_nullifier_definition(module: &SortedModule) -> bool { module.functions.iter().any(|func| { func.def.name.0.contents == "compute_note_hash_and_nullifier" && func.def.parameters.len() == 4 - && func.def.parameters[0].typ.typ == UnresolvedTypeData::FieldElement + && match &func.def.parameters[0].typ.typ { + UnresolvedTypeData::Named(path, _) => path.segments.last().unwrap().0.contents == "AztecAddress", + _ => false, + } && func.def.parameters[1].typ.typ == UnresolvedTypeData::FieldElement && func.def.parameters[2].typ.typ == UnresolvedTypeData::FieldElement // checks if the 4th parameter is an array and the Box in @@ -299,7 +325,7 @@ fn transform_module( module: &mut SortedModule, crate_id: &CrateId, context: &HirContext, -) -> Result { +) -> Result { let mut has_transformed_module = false; // Check for a user defined storage struct @@ -308,8 +334,7 @@ fn transform_module( if storage_defined && !check_for_compute_note_hash_and_nullifier_definition(module) { let crate_graph = &context.crate_graph[crate_id]; return Err(( - AztecMacroError::AztecComputeNoteHashAndNullifierNotFound { span: Span::default() } - .into(), + AztecMacroError::ComputeNoteHashAndNullifierNotFound { span: Span::default() }, crate_graph.root_file_id, 
)); } @@ -323,11 +348,14 @@ fn transform_module( for func in module.functions.iter_mut() { for secondary_attribute in func.def.attributes.secondary.clone() { + let crate_graph = &context.crate_graph[crate_id]; if is_custom_attribute(&secondary_attribute, "aztec(private)") { - transform_function("Private", func, storage_defined); + transform_function("Private", func, storage_defined) + .map_err(|err| (err, crate_graph.root_file_id))?; has_transformed_module = true; } else if is_custom_attribute(&secondary_attribute, "aztec(public)") { - transform_function("Public", func, storage_defined); + transform_function("Public", func, storage_defined) + .map_err(|err| (err, crate_graph.root_file_id))?; has_transformed_module = true; } } @@ -337,6 +365,28 @@ fn transform_module( has_transformed_module = true; } } + + if has_transformed_module { + // We only want to run these checks if the macro processor has found the module to be an Aztec contract. + + if module.functions.len() > MAX_CONTRACT_FUNCTIONS { + let crate_graph = &context.crate_graph[crate_id]; + return Err(( + AztecMacroError::ContractHasTooManyFunctions { span: Span::default() }, + crate_graph.root_file_id, + )); + } + + let constructor_defined = module.functions.iter().any(|func| func.name() == "constructor"); + if !constructor_defined { + let crate_graph = &context.crate_graph[crate_id]; + return Err(( + AztecMacroError::ContractConstructorMissing { span: Span::default() }, + crate_graph.root_file_id, + )); + } + } + Ok(has_transformed_module) } @@ -344,7 +394,11 @@ fn transform_module( /// - A new Input that is provided for a kernel app circuit, named: {Public/Private}ContextInputs /// - Hashes all of the function input variables /// - This instantiates a helper function -fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) { +fn transform_function( + ty: &str, + func: &mut NoirFunction, + storage_defined: bool, +) -> Result<(), AztecMacroError> { let context_name = 
format!("{}Context", ty); let inputs_name = format!("{}ContextInputs", ty); let return_type_name = format!("{}CircuitPublicInputs", ty); @@ -356,7 +410,7 @@ fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) } // Insert the context creation as the first action - let create_context = create_context(&context_name, &func.def.parameters); + let create_context = create_context(&context_name, &func.def.parameters)?; func.def.body.0.splice(0..0, (create_context).iter().cloned()); // Add the inputs to the params @@ -383,6 +437,8 @@ fn transform_function(ty: &str, func: &mut NoirFunction, storage_defined: bool) "Public" => func.def.is_open = true, _ => (), } + + Ok(()) } /// Transform Unconstrained @@ -483,8 +539,8 @@ const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; /// Inserts the following code: /// ```noir /// impl SomeStruct { -/// fn selector() -> Field { -/// aztec::oracle::compute_selector::compute_selector("SIGNATURE_PLACEHOLDER") +/// fn selector() -> FunctionSelector { +/// aztec::protocol_types::abis::function_selector::FunctionSelector::from_signature("SIGNATURE_PLACEHOLDER") /// } /// } /// ``` @@ -495,18 +551,27 @@ const SIGNATURE_PLACEHOLDER: &str = "SIGNATURE_PLACEHOLDER"; fn generate_selector_impl(structure: &NoirStruct) -> TypeImpl { let struct_type = make_type(UnresolvedTypeData::Named(path(structure.name.clone()), vec![])); + let selector_path = + chained_path!("aztec", "protocol_types", "abis", "function_selector", "FunctionSelector"); + let mut from_signature_path = selector_path.clone(); + from_signature_path.segments.push(ident("from_signature")); + let selector_fun_body = BlockExpression(vec![make_statement(StatementKind::Expression(call( - variable_path(chained_path!("aztec", "selector", "compute_selector")), + variable_path(from_signature_path), vec![expression(ExpressionKind::Literal(Literal::Str(SIGNATURE_PLACEHOLDER.to_string())))], )))]); + // Define `FunctionSelector` return type + let return_type = + 
FunctionReturnType::Ty(make_type(UnresolvedTypeData::Named(selector_path, vec![]))); + let mut selector_fn_def = FunctionDefinition::normal( &ident("selector"), &vec![], &[], &selector_fun_body, &[], - &FunctionReturnType::Ty(make_type(UnresolvedTypeData::FieldElement)), + &return_type, ); selector_fn_def.visibility = FunctionVisibility::Public; @@ -572,7 +637,7 @@ fn create_inputs(ty: &str) -> Param { /// let mut context = PrivateContext::new(inputs, hasher.hash()); /// } /// ``` -fn create_context(ty: &str, params: &[Param]) -> Vec { +fn create_context(ty: &str, params: &[Param]) -> Result, AztecMacroError> { let mut injected_expressions: Vec = vec![]; // `let mut hasher = Hasher::new();` @@ -588,7 +653,7 @@ fn create_context(ty: &str, params: &[Param]) -> Vec { injected_expressions.push(let_hasher); // Iterate over each of the function parameters, adding to them to the hasher - params.iter().for_each(|Param { pattern, typ, span: _, visibility: _ }| { + for Param { pattern, typ, span, .. } in params { match pattern { Pattern::Identifier(identifier) => { // Match the type to determine the padding to do @@ -606,13 +671,29 @@ fn create_context(ty: &str, params: &[Param]) -> Vec { UnresolvedTypeData::Integer(..) | UnresolvedTypeData::Bool => { add_cast_to_hasher(identifier) } - _ => unreachable!("[Aztec Noir] Provided parameter type is not supported"), + UnresolvedTypeData::String(..) => { + let (var_bytes, id) = str_to_bytes(identifier); + injected_expressions.push(var_bytes); + add_array_to_hasher( + &id, + &UnresolvedType { + typ: UnresolvedTypeData::Integer(Signedness::Unsigned, 32), + span: None, + }, + ) + } + _ => { + return Err(AztecMacroError::UnsupportedFunctionArgumentType { + typ: unresolved_type.clone(), + span: *span, + }) + } }; injected_expressions.push(expression); } _ => todo!(), // Maybe unreachable? 
} - }); + } // Create the inputs to the context let inputs_expression = variable("inputs"); @@ -634,7 +715,7 @@ fn create_context(ty: &str, params: &[Param]) -> Vec { injected_expressions.push(let_context); // Return all expressions that will be injected by the hasher - injected_expressions + Ok(injected_expressions) } /// Abstract Return Type @@ -895,6 +976,21 @@ fn add_struct_to_hasher(identifier: &Ident) -> Statement { ))) } +fn str_to_bytes(identifier: &Ident) -> (Statement, Ident) { + // let identifier_as_bytes = identifier.as_bytes(); + let var = variable_ident(identifier.clone()); + let contents = if let ExpressionKind::Variable(p) = &var.kind { + p.segments.first().cloned().unwrap_or_else(|| panic!("No segments")).0.contents + } else { + panic!("Unexpected identifier type") + }; + let bytes_name = format!("{}_bytes", contents); + let var_bytes = assignment(&bytes_name, method_call(var, "as_bytes", vec![])); + let id = Ident::new(bytes_name, Span::default()); + + (var_bytes, id) +} + fn create_loop_over(var: Expression, loop_body: Vec) -> Statement { // If this is an array of primitive types (integers / fields) we can add them each to the hasher // casted to a field @@ -938,7 +1034,7 @@ fn add_array_to_hasher(identifier: &Ident, arr_type: &UnresolvedType) -> Stateme UnresolvedTypeData::Named(..) 
=> { let hasher_method_name = "add_multiple".to_owned(); let call = method_call( - // All serialise on each element + // All serialize on each element arr_index, // variable "serialize", // method name vec![], // args diff --git a/bootstrap.sh b/bootstrap.sh index bf672ac0ad2..5ebe7ade090 100755 --- a/bootstrap.sh +++ b/bootstrap.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0") diff --git a/bootstrap_cache.sh b/bootstrap_cache.sh new file mode 100755 index 00000000000..672702416bd --- /dev/null +++ b/bootstrap_cache.sh @@ -0,0 +1,11 @@ +#!/usr/bin/env bash +set -eu + +cd "$(dirname "$0")" +source ../build-system/scripts/setup_env '' '' mainframe_$USER > /dev/null + +echo -e "\033[1mRetrieving noir packages from remote cache...\033[0m" +extract_repo noir-packages /usr/src/noir/packages ./ +echo -e "\033[1mRetrieving nargo from remote cache...\033[0m" +extract_repo noir /usr/src/noir/target/release ./target/ + diff --git a/compiler/fm/Cargo.toml b/compiler/fm/Cargo.toml index 699f709e9b5..42e4b0c25d7 100644 --- a/compiler/fm/Cargo.toml +++ b/compiler/fm/Cargo.toml @@ -12,5 +12,5 @@ codespan-reporting.workspace = true serde.workspace = true [dev-dependencies] -tempfile = "3.2.0" +tempfile.workspace = true iter-extended.workspace = true diff --git a/compiler/fm/build.rs b/compiler/fm/build.rs deleted file mode 100644 index 747ab4fe1a2..00000000000 --- a/compiler/fm/build.rs +++ /dev/null @@ -1,20 +0,0 @@ -use std::path::Path; - -/// Expects that the given directory is an existing path -fn rerun_if_stdlib_changes(directory: &Path) { - for entry in std::fs::read_dir(directory).unwrap() { - let path = entry.unwrap().path(); - - if path.is_dir() { - rerun_if_stdlib_changes(&path); - } else { - // Tell Cargo that if the given file changes, to rerun this build script. 
- println!("cargo:rerun-if-changed={}", path.to_string_lossy()); - } - } -} - -fn main() { - let stdlib_src_dir = Path::new("../../noir_stdlib/"); - rerun_if_stdlib_changes(stdlib_src_dir); -} diff --git a/compiler/fm/src/file_map.rs b/compiler/fm/src/file_map.rs index 0cbdc535e40..50412d352ec 100644 --- a/compiler/fm/src/file_map.rs +++ b/compiler/fm/src/file_map.rs @@ -30,7 +30,7 @@ impl From<&PathBuf> for PathString { PathString::from(pb.to_owned()) } } -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct FileMap { files: SimpleFiles, name_to_id: HashMap, @@ -75,6 +75,10 @@ impl FileMap { pub fn get_file_id(&self, file_name: &PathString) -> Option { self.name_to_id.get(file_name).cloned() } + + pub fn all_file_ids(&self) -> impl Iterator { + self.name_to_id.values() + } } impl Default for FileMap { fn default() -> Self { diff --git a/compiler/fm/src/lib.rs b/compiler/fm/src/lib.rs index 2036f2f75c7..e7da3870d9c 100644 --- a/compiler/fm/src/lib.rs +++ b/compiler/fm/src/lib.rs @@ -16,7 +16,7 @@ use std::{ }; pub const FILE_EXTENSION: &str = "nr"; - +#[derive(Clone)] pub struct FileManager { root: PathBuf, file_map: FileMap, @@ -103,35 +103,15 @@ impl FileManager { assert!(old_value.is_none(), "ice: the same path was inserted into the file manager twice"); } - pub fn fetch_file(&self, file_id: FileId) -> File { + pub fn fetch_file(&self, file_id: FileId) -> Option<&str> { // Unwrap as we ensure that all file_id's map to a corresponding file in the file map - self.file_map.get_file(file_id).unwrap() + self.file_map.get_file(file_id).map(|file| file.source()) } - pub fn path(&self, file_id: FileId) -> &Path { + pub fn path(&self, file_id: FileId) -> Option<&Path> { // Unwrap as we ensure that all file_ids are created by the file manager // So all file_ids will points to a corresponding path - self.id_to_path.get(&file_id).unwrap().as_path() - } - - // TODO: This should also ideally not be here, so that the file manager - // TODO: does not know about rust modules. 
- // TODO: Ideally this is moved to def_collector_mod and we make this method accept a FileManager - pub fn find_module(&self, anchor: FileId, mod_name: &str) -> Result { - let anchor_path = self.path(anchor).with_extension(""); - let anchor_dir = anchor_path.parent().unwrap(); - - // if `anchor` is a `main.nr`, `lib.nr`, `mod.nr` or `{mod_name}.nr`, we check siblings of - // the anchor at `base/mod_name.nr`. - let candidate = if should_check_siblings_for_module(&anchor_path, anchor_dir) { - anchor_dir.join(format!("{mod_name}.{FILE_EXTENSION}")) - } else { - // Otherwise, we check for children of the anchor at `base/anchor/mod_name.nr` - anchor_path.join(format!("{mod_name}.{FILE_EXTENSION}")) - }; - - self.name_to_id(candidate.clone()) - .ok_or_else(|| candidate.as_os_str().to_string_lossy().to_string()) + self.id_to_path.get(&file_id).map(|path| path.as_path()) } // TODO: This should accept a &Path instead of a PathBuf @@ -140,28 +120,6 @@ impl FileManager { } } -// TODO: This should not be here because the file manager should not know about the -// TODO: rust modules. See comment on `find_module`` -// TODO: Moreover, the check for main, lib, mod should ideally not be done here -/// Returns true if a module's child module's are expected to be in the same directory. -/// Returns false if they are expected to be in a subdirectory matching the name of the module. -fn should_check_siblings_for_module(module_path: &Path, parent_path: &Path) -> bool { - if let Some(filename) = module_path.file_stem() { - // This check also means a `main.nr` or `lib.nr` file outside of the crate root would - // check its same directory for child modules instead of a subdirectory. Should we prohibit - // `main.nr` and `lib.nr` files outside of the crate root? - filename == "main" - || filename == "lib" - || filename == "mod" - || Some(filename) == parent_path.file_stem() - } else { - // If there's no filename, we arbitrarily return true. 
- // Alternatively, we could panic, but this is left to a different step where we - // ideally have some source location to issue an error. - true - } -} - pub trait NormalizePath { /// Replacement for `std::fs::canonicalize` that doesn't verify the path exists. /// @@ -251,22 +209,6 @@ mod tests { file_path } - #[test] - fn path_resolve_file_module() { - let dir = tempdir().unwrap(); - - let entry_file_name = Path::new("my_dummy_file.nr"); - create_dummy_file(&dir, entry_file_name); - - let mut fm = FileManager::new(dir.path()); - - let file_id = fm.add_file_with_source(entry_file_name, "fn foo() {}".to_string()).unwrap(); - - let dep_file_name = Path::new("foo.nr"); - create_dummy_file(&dir, dep_file_name); - fm.find_module(file_id, "foo").unwrap_err(); - } - #[test] fn path_resolve_file_module_other_ext() { let dir = tempdir().unwrap(); @@ -277,48 +219,7 @@ mod tests { let file_id = fm.add_file_with_source(file_name, "fn foo() {}".to_string()).unwrap(); - assert!(fm.path(file_id).ends_with("foo.nr")); - } - - #[test] - fn path_resolve_sub_module() { - let dir = tempdir().unwrap(); - let mut fm = FileManager::new(dir.path()); - - // Create a lib.nr file at the root. 
- // we now have dir/lib.nr - let lib_nr_path = create_dummy_file(&dir, Path::new("lib.nr")); - let file_id = fm - .add_file_with_source(lib_nr_path.as_path(), "fn foo() {}".to_string()) - .expect("could not add file to file manager and obtain a FileId"); - - // Create a sub directory - // we now have: - // - dir/lib.nr - // - dir/sub_dir - let sub_dir = TempDir::new_in(&dir).unwrap(); - let sub_dir_name = sub_dir.path().file_name().unwrap().to_str().unwrap(); - - // Add foo.nr to the subdirectory - // we no have: - // - dir/lib.nr - // - dir/sub_dir/foo.nr - let foo_nr_path = create_dummy_file(&sub_dir, Path::new("foo.nr")); - fm.add_file_with_source(foo_nr_path.as_path(), "fn foo() {}".to_string()); - - // Add a parent module for the sub_dir - // we no have: - // - dir/lib.nr - // - dir/sub_dir.nr - // - dir/sub_dir/foo.nr - let sub_dir_nr_path = create_dummy_file(&dir, Path::new(&format!("{sub_dir_name}.nr"))); - fm.add_file_with_source(sub_dir_nr_path.as_path(), "fn foo() {}".to_string()); - - // First check for the sub_dir.nr file and add it to the FileManager - let sub_dir_file_id = fm.find_module(file_id, sub_dir_name).unwrap(); - - // Now check for files in it's subdirectory - fm.find_module(sub_dir_file_id, "foo").unwrap(); + assert!(fm.path(file_id).unwrap().ends_with("foo.nr")); } /// Tests that two identical files that have different paths are treated as the same file diff --git a/compiler/integration-tests/circuits/recursion/src/main.nr b/compiler/integration-tests/circuits/recursion/src/main.nr index e60e4e0b61a..173207766fb 100644 --- a/compiler/integration-tests/circuits/recursion/src/main.nr +++ b/compiler/integration-tests/circuits/recursion/src/main.nr @@ -1,17 +1,15 @@ use dep::std; fn main( - verification_key : [Field; 114], - proof : [Field; 94], - public_inputs : [Field; 1], - key_hash : Field, -) -> pub [Field;16]{ - let input_aggregation_object = [0; 16]; + verification_key: [Field; 114], + proof: [Field; 93], + public_inputs: [Field; 1], + 
key_hash: Field +) { std::verify_proof( - verification_key.as_slice(), - proof.as_slice(), - public_inputs.as_slice(), - key_hash, - input_aggregation_object + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash ) } diff --git a/compiler/integration-tests/scripts/codegen-verifiers.sh b/compiler/integration-tests/scripts/codegen-verifiers.sh index b3a52217271..e377a3ee3f8 100644 --- a/compiler/integration-tests/scripts/codegen-verifiers.sh +++ b/compiler/integration-tests/scripts/codegen-verifiers.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash self_path=$(dirname "$(readlink -f "$0")") diff --git a/compiler/integration-tests/test/browser/compile_prove_verify.test.ts b/compiler/integration-tests/test/browser/compile_prove_verify.test.ts index 29e2fbc55b8..0a829def09e 100644 --- a/compiler/integration-tests/test/browser/compile_prove_verify.test.ts +++ b/compiler/integration-tests/test/browser/compile_prove_verify.test.ts @@ -1,22 +1,13 @@ import { expect } from '@esm-bundle/chai'; import * as TOML from 'smol-toml'; -import newCompiler, { - CompiledProgram, - PathToFileSourceMap, - compile, - init_log_level as compilerLogLevel, -} from '@noir-lang/noir_wasm'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; import { Noir } from '@noir-lang/noir_js'; import { InputMap } from '@noir-lang/noirc_abi'; import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { getFile } from './utils.js'; -await newCompiler(); - -compilerLogLevel('INFO'); - const test_cases = [ { case: 'test_programs/execution_success/1_mul', @@ -32,12 +23,11 @@ const suite = Mocha.Suite.create(mocha.suite, 'Noir end to end test'); suite.timeout(60 * 20e3); //20mins -function getCircuit(noirSource: string): CompiledProgram { - const sourceMap = new PathToFileSourceMap(); - sourceMap.add_source_code('main.nr', noirSource); - - // We're ignoring this in the resolver but pass in something sensible. 
- const result = compile('main.nr', undefined, undefined, sourceMap); +async function getCircuit(projectPath: string) { + const fm = createFileManager('/'); + await fm.writeFile('./src/main.nr', await getFile(`${projectPath}/src/main.nr`)); + await fm.writeFile('./Nargo.toml', await getFile(`${projectPath}/Nargo.toml`)); + const result = await compile(fm); if (!('program' in result)) { throw new Error('Compilation failed'); } @@ -51,11 +41,9 @@ test_cases.forEach((testInfo) => { const base_relative_path = '../../../../..'; const test_case = testInfo.case; - const noir_source = await getFile(`${base_relative_path}/${test_case}/src/main.nr`); - - let noir_program: CompiledProgram; + let noir_program; try { - noir_program = getCircuit(noir_source); + noir_program = await getCircuit(`${base_relative_path}/${test_case}`); expect(noir_program, 'Compile output ').to.be.an('object'); } catch (e) { @@ -66,7 +54,7 @@ test_cases.forEach((testInfo) => { const backend = new BarretenbergBackend(noir_program); const program = new Noir(noir_program, backend); - const prover_toml = await getFile(`${base_relative_path}/${test_case}/Prover.toml`); + const prover_toml = await new Response(await getFile(`${base_relative_path}/${test_case}/Prover.toml`)).text(); const inputs: InputMap = TOML.parse(prover_toml) as InputMap; // JS Proving diff --git a/compiler/integration-tests/test/browser/recursion.test.ts b/compiler/integration-tests/test/browser/recursion.test.ts index faa317b2c3c..80199de5701 100644 --- a/compiler/integration-tests/test/browser/recursion.test.ts +++ b/compiler/integration-tests/test/browser/recursion.test.ts @@ -2,59 +2,46 @@ import { expect } from '@esm-bundle/chai'; import { TEST_LOG_LEVEL } from '../environment.js'; import { Logger } from 'tslog'; -import newCompiler, { - CompiledProgram, - PathToFileSourceMap, - compile, - init_log_level as compilerLogLevel, -} from '@noir-lang/noir_wasm'; import { acvm, abi, Noir } from '@noir-lang/noir_js'; import * as TOML 
from 'smol-toml'; import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { getFile } from './utils.js'; import { Field, InputMap } from '@noir-lang/noirc_abi'; +import { createFileManager, compile } from '@noir-lang/noir_wasm'; const logger = new Logger({ name: 'test', minLevel: TEST_LOG_LEVEL }); const { default: initACVM } = acvm; const { default: newABICoder } = abi; -await newCompiler(); await newABICoder(); await initACVM(); -compilerLogLevel('INFO'); - const base_relative_path = '../../../../..'; const circuit_main = 'test_programs/execution_success/assert_statement'; const circuit_recursion = 'compiler/integration-tests/circuits/recursion'; -function getCircuit(noirSource: string): CompiledProgram { - const sourceMap = new PathToFileSourceMap(); - sourceMap.add_source_code('main.nr', noirSource); - const result = compile('main.nr', undefined, undefined, sourceMap); +async function getCircuit(projectPath: string) { + const fm = createFileManager('/'); + await fm.writeFile('./src/main.nr', await getFile(`${projectPath}/src/main.nr`)); + await fm.writeFile('./Nargo.toml', await getFile(`${projectPath}/Nargo.toml`)); + const result = await compile(fm); if (!('program' in result)) { throw new Error('Compilation failed'); } - return result.program; } describe('It compiles noir program code, receiving circuit bytes and abi object.', () => { - let circuit_main_source; let circuit_main_toml; - let circuit_recursion_source; before(async () => { - circuit_main_source = await getFile(`${base_relative_path}/${circuit_main}/src/main.nr`); - circuit_main_toml = await getFile(`${base_relative_path}/${circuit_main}/Prover.toml`); - - circuit_recursion_source = await getFile(`${base_relative_path}/${circuit_recursion}/src/main.nr`); + circuit_main_toml = await new Response(await getFile(`${base_relative_path}/${circuit_main}/Prover.toml`)).text(); }); it('Should generate valid inner proof for correct input, then verify proof within a proof', async () => 
{ - const main_program = getCircuit(circuit_main_source); + const main_program = await getCircuit(`${base_relative_path}/${circuit_main}`); const main_inputs: InputMap = TOML.parse(circuit_main_toml) as InputMap; const main_backend = new BarretenbergBackend(main_program); @@ -79,12 +66,11 @@ describe('It compiles noir program code, receiving circuit bytes and abi object. proof: proofAsFields, public_inputs: [main_inputs.y as Field], key_hash: vkHash, - input_aggregation_object: ['0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], }; logger.debug('recursion_inputs', recursion_inputs); - const recursion_program = await getCircuit(circuit_recursion_source); + const recursion_program = await getCircuit(`${base_relative_path}/${circuit_recursion}`); const recursion_backend = new BarretenbergBackend(recursion_program); diff --git a/compiler/integration-tests/test/browser/utils.ts b/compiler/integration-tests/test/browser/utils.ts index 35588407193..a5e3fe1bc61 100644 --- a/compiler/integration-tests/test/browser/utils.ts +++ b/compiler/integration-tests/test/browser/utils.ts @@ -1,8 +1,8 @@ -export async function getFile(file_path: string): Promise { +export async function getFile(file_path: string): Promise> { const file_url = new URL(file_path, import.meta.url); const response = await fetch(file_url); if (!response.ok) throw new Error('Network response was not OK'); - return await response.text(); + return response.body as ReadableStream; } diff --git a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts index 6c20d44882b..9cdd80edc15 100644 --- a/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts +++ b/compiler/integration-tests/test/node/onchain_recursive_verification.test.ts @@ -2,35 +2,35 @@ import { expect } from 'chai'; import { ethers } from 'hardhat'; import { readFileSync } from 'node:fs'; -import { resolve 
} from 'path'; +import { resolve, join } from 'path'; import toml from 'toml'; -import { - compile, - CompiledProgram, - init_log_level as compilerLogLevel, - PathToFileSourceMap, -} from '@noir-lang/noir_wasm'; import { Noir } from '@noir-lang/noir_js'; -import { BarretenbergBackend, flattenPublicInputs } from '@noir-lang/backend_barretenberg'; +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { Field, InputMap } from '@noir-lang/noirc_abi'; -compilerLogLevel('INFO'); +import { compile, createFileManager } from '@noir-lang/noir_wasm'; it(`smart contract can verify a recursive proof`, async () => { - const innerSourcePath = resolve(`../../test_programs/execution_success/assert_statement/src/main.nr`); - const sourceMapInnerProgram = new PathToFileSourceMap(); - sourceMapInnerProgram.add_source_code(innerSourcePath, readFileSync(innerSourcePath, 'utf-8')); - const innerProgram = ( - compile(innerSourcePath, undefined, undefined, sourceMapInnerProgram) as { program: CompiledProgram } - ).program; - - const recursionSourcePath = resolve(`./circuits/recursion/src/main.nr`); - const sourceMapRecursionProgram = new PathToFileSourceMap(); - sourceMapRecursionProgram.add_source_code(recursionSourcePath, readFileSync(recursionSourcePath, 'utf-8')); - const recursionProgram = ( - compile(recursionSourcePath, undefined, undefined, sourceMapRecursionProgram) as { program: CompiledProgram } - ).program; + const basePath = resolve(join(__dirname, '../../../../')); + const fm = createFileManager(basePath); + const innerCompilationResult = await compile( + fm, + join(basePath, './test_programs/execution_success/assert_statement'), + ); + if (!('program' in innerCompilationResult)) { + throw new Error('Compilation failed'); + } + const innerProgram = innerCompilationResult.program; + + const recursionCompilationResult = await compile( + fm, + join(basePath, './compiler/integration-tests/circuits/recursion'), + ); + if (!('program' in 
recursionCompilationResult)) { + throw new Error('Compilation failed'); + } + const recursionProgram = recursionCompilationResult.program; // Intermediate proof @@ -38,8 +38,9 @@ it(`smart contract can verify a recursive proof`, async () => { const inner = new Noir(innerProgram); const inner_prover_toml = readFileSync( - resolve(`../../test_programs/execution_success/assert_statement/Prover.toml`), + join(basePath, `./test_programs/execution_success/assert_statement/Prover.toml`), ).toString(); + const inner_inputs = toml.parse(inner_prover_toml); const { witness: main_witness } = await inner.execute(inner_inputs); @@ -71,10 +72,7 @@ it(`smart contract can verify a recursive proof`, async () => { const contract = await ethers.deployContract('contracts/recursion.sol:UltraVerifier', []); - const result = await contract.verify.staticCall( - recursion_proof.proof, - flattenPublicInputs(recursion_proof.publicInputs), - ); + const result = await contract.verify.staticCall(recursion_proof.proof, recursion_proof.publicInputs); expect(result).to.be.true; }); diff --git a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts index 5b3d0e2d337..d870956ea7a 100644 --- a/compiler/integration-tests/test/node/smart_contract_verifier.test.ts +++ b/compiler/integration-tests/test/node/smart_contract_verifier.test.ts @@ -5,11 +5,10 @@ import { readFileSync } from 'node:fs'; import { resolve } from 'path'; import toml from 'toml'; -import { PathToFileSourceMap, compile, init_log_level as compilerLogLevel } from '@noir-lang/noir_wasm'; import { Noir } from '@noir-lang/noir_js'; -import { BarretenbergBackend, flattenPublicInputs } from '@noir-lang/backend_barretenberg'; +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; -compilerLogLevel('INFO'); +import { compile, createFileManager } from '@noir-lang/noir_wasm'; const test_cases = [ { @@ -31,11 +30,8 @@ 
test_cases.forEach((testInfo) => { const base_relative_path = '../..'; const test_case = testInfo.case; - const noirSourcePath = resolve(`${base_relative_path}/${test_case}/src/main.nr`); - const sourceMap = new PathToFileSourceMap(); - sourceMap.add_source_code(noirSourcePath, readFileSync(noirSourcePath, 'utf-8')); - - const compileResult = compile(noirSourcePath, undefined, undefined, sourceMap); + const fm = createFileManager(resolve(`${base_relative_path}/${test_case}`)); + const compileResult = await compile(fm); if (!('program' in compileResult)) { throw new Error('Compilation failed'); } @@ -61,7 +57,7 @@ test_cases.forEach((testInfo) => { const contract = await ethers.deployContract(testInfo.compiled, []); - const result = await contract.verify(proofData.proof, flattenPublicInputs(proofData.publicInputs)); + const result = await contract.verify(proofData.proof, proofData.publicInputs); expect(result).to.be.true; }); diff --git a/compiler/noirc_driver/Cargo.toml b/compiler/noirc_driver/Cargo.toml index e5a837e6822..eb9650e8aec 100644 --- a/compiler/noirc_driver/Cargo.toml +++ b/compiler/noirc_driver/Cargo.toml @@ -21,6 +21,7 @@ iter-extended.workspace = true fm.workspace = true serde.workspace = true fxhash.workspace = true -rust-embed = "6.6.0" +rust-embed.workspace = true +tracing.workspace = true aztec_macros = { path = "../../aztec_macros" } diff --git a/compiler/noirc_driver/build.rs b/compiler/noirc_driver/build.rs index 6bef7f1fda7..73a56142075 100644 --- a/compiler/noirc_driver/build.rs +++ b/compiler/noirc_driver/build.rs @@ -1,4 +1,5 @@ const GIT_COMMIT: &&str = &"GIT_COMMIT"; +use std::path::Path; fn main() { // Only use build_data if the environment variable isn't set @@ -8,4 +9,21 @@ fn main() { build_data::set_GIT_DIRTY(); build_data::no_debug_rebuilds(); } + + let stdlib_src_dir = Path::new("../../noir_stdlib/"); + rerun_if_stdlib_changes(stdlib_src_dir); +} + +/// Expects that the given directory is an existing path +fn 
rerun_if_stdlib_changes(directory: &Path) { + for entry in std::fs::read_dir(directory).unwrap() { + let path = entry.unwrap().path(); + + if path.is_dir() { + rerun_if_stdlib_changes(&path); + } else { + // Tell Cargo that if the given file changes, to rerun this build script. + println!("cargo:rerun-if-changed={}", path.to_string_lossy()); + } + } } diff --git a/compiler/noirc_driver/src/abi_gen.rs b/compiler/noirc_driver/src/abi_gen.rs index 9d0d64b6300..e546cd822b7 100644 --- a/compiler/noirc_driver/src/abi_gen.rs +++ b/compiler/noirc_driver/src/abi_gen.rs @@ -33,7 +33,7 @@ pub(super) fn compute_function_abi( ) -> (Vec, Option) { let func_meta = context.def_interner.function_meta(func_id); - let (parameters, return_type) = func_meta.into_function_signature(); + let (parameters, return_type) = func_meta.function_signature(); let parameters = into_abi_params(context, parameters); let return_type = return_type.map(|typ| AbiType::from_type(context, &typ)); (parameters, return_type) diff --git a/compiler/noirc_driver/src/contract.rs b/compiler/noirc_driver/src/contract.rs index ae55d239cf3..4d6d57ba9b6 100644 --- a/compiler/noirc_driver/src/contract.rs +++ b/compiler/noirc_driver/src/contract.rs @@ -26,7 +26,7 @@ pub enum ContractFunctionType { Unconstrained, } -#[derive(Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize)] pub struct CompiledContract { pub noir_version: String, diff --git a/compiler/noirc_driver/src/debug.rs b/compiler/noirc_driver/src/debug.rs index 144e636b534..5e309398cc5 100644 --- a/compiler/noirc_driver/src/debug.rs +++ b/compiler/noirc_driver/src/debug.rs @@ -31,14 +31,12 @@ pub(crate) fn filter_relevant_files( let mut file_map = BTreeMap::new(); for file_id in files_with_debug_symbols { - let file_source = file_manager.fetch_file(file_id).source(); + let file_path = file_manager.path(file_id).expect("file should exist"); + let file_source = file_manager.fetch_file(file_id).expect("file should exist"); file_map.insert( file_id, 
- DebugFile { - source: file_source.to_string(), - path: file_manager.path(file_id).to_path_buf(), - }, + DebugFile { source: file_source.to_string(), path: file_path.to_path_buf() }, ); } file_map diff --git a/compiler/noirc_driver/src/lib.rs b/compiler/noirc_driver/src/lib.rs index 7a3753c26f6..1d0e4d4ef66 100644 --- a/compiler/noirc_driver/src/lib.rs +++ b/compiler/noirc_driver/src/lib.rs @@ -4,7 +4,7 @@ #![warn(clippy::semicolon_if_nothing_returned)] use clap::Args; -use fm::FileId; +use fm::{FileId, FileManager}; use iter_extended::vecmap; use noirc_abi::{AbiParameter, AbiType, ContractEvent}; use noirc_errors::{CustomDiagnostic, FileDiagnostic}; @@ -19,6 +19,7 @@ use noirc_frontend::monomorphization::{monomorphize, monomorphize_debug}; use noirc_frontend::node_interner::FuncId; use serde::{Deserialize, Serialize}; use std::path::Path; +use tracing::info; mod abi_gen; mod contract; @@ -46,6 +47,10 @@ pub const NOIR_ARTIFACT_VERSION_STRING: &str = #[derive(Args, Clone, Debug, Default, Serialize, Deserialize)] pub struct CompileOptions { + /// Force a full recompilation. + #[arg(long = "force")] + pub force_compile: bool, + /// Emit debug information for the intermediate SSA IR #[arg(long, hide = true)] pub show_ssa: bool, @@ -73,6 +78,10 @@ pub struct CompileOptions { #[arg(long, hide = true)] pub disable_macros: bool, + /// Outputs the monomorphized IR to stdout for debugging + #[arg(long, hide = true)] + pub show_monomorphized: bool, + /// Insert debug symbols to inspect variables #[arg(long)] pub instrument_debug: bool, @@ -91,17 +100,40 @@ pub type ErrorsAndWarnings = Vec; /// Helper type for connecting a compilation artifact to the errors or warnings which were produced during compilation. 
pub type CompilationResult = Result<(T, Warnings), ErrorsAndWarnings>; -/// Adds the file from the file system at `Path` to the crate graph as a root file -pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { +/// Helper method to return a file manager instance with the stdlib already added +/// +/// TODO: This should become the canonical way to create a file manager and +/// TODO if we use a File manager trait, we can move file manager into this crate +/// TODO as a module +pub fn file_manager_with_stdlib(root: &Path) -> FileManager { + let mut file_manager = FileManager::new(root); + + add_stdlib_source_to_file_manager(&mut file_manager); + + file_manager +} + +/// Adds the source code for the stdlib into the file manager +fn add_stdlib_source_to_file_manager(file_manager: &mut FileManager) { // Add the stdlib contents to the file manager, since every package automatically has a dependency // on the stdlib. For other dependencies, we read the package.Dependencies file to add their file // contents to the file manager. However since the dependency on the stdlib is implicit, we need // to manually add it here. let stdlib_paths_with_source = stdlib::stdlib_paths_with_source(); for (path, source) in stdlib_paths_with_source { - context.file_manager.add_file_with_source_canonical_path(Path::new(&path), source); + file_manager.add_file_with_source_canonical_path(Path::new(&path), source); } + // Adds the synthetic debug module for instrumentation into the file manager + let path_to_debug_lib_file = Path::new(DEBUG_CRATE_NAME).join("lib.nr"); + file_manager.add_file_with_contents(&path_to_debug_lib_file, &create_prologue_program(8)); +} + +/// Adds the file from the file system at `Path` to the crate graph as a root file +/// +/// Note: This methods adds the stdlib as a dependency to the crate. +/// This assumes that the stdlib has already been added to the file manager. 
+pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { let path_to_std_lib_file = Path::new(STD_CRATE_NAME).join("lib.nr"); let std_file_id = context .file_manager @@ -112,8 +144,8 @@ pub fn prepare_crate(context: &mut Context, file_name: &Path) -> CrateId { let path_to_debug_lib_file = Path::new(DEBUG_CRATE_NAME).join("lib.nr"); let debug_file_id = context .file_manager - .add_file_with_contents(&path_to_debug_lib_file, &create_prologue_program(8)) - .unwrap(); + .name_to_id(path_to_debug_lib_file) + .expect("debug module is expected to be present"); let debug_crate_id = context.crate_graph.add_crate(debug_file_id); let root_file_id = context.file_manager.name_to_id(file_name.to_path_buf()).unwrap_or_else(|| panic!("files are expected to be added to the FileManager before reaching the compiler file_path: {file_name:?}")); @@ -159,6 +191,7 @@ pub fn add_dep( /// /// This returns a (possibly empty) vector of any warnings found on success. /// On error, this returns a non-empty vector of warnings and error messages, with at least one error. 
+#[tracing::instrument(level = "trace", skip(context))] pub fn check_crate( context: &mut Context, crate_id: CrateId, @@ -203,7 +236,6 @@ pub fn compile_main( crate_id: CrateId, options: &CompileOptions, cached_program: Option, - force_compile: bool, ) -> CompilationResult { let (_, mut warnings) = check_crate(context, crate_id, options.deny_warnings, options.disable_macros)?; @@ -217,8 +249,9 @@ pub fn compile_main( vec![err] })?; - let compiled_program = compile_no_check(context, options, main, cached_program, force_compile) - .map_err(FileDiagnostic::from)?; + let compiled_program = + compile_no_check(context, options, main, cached_program, options.force_compile) + .map_err(FileDiagnostic::from)?; let compilation_warnings = vecmap(compiled_program.warnings.clone(), FileDiagnostic::from); if options.deny_warnings && !compilation_warnings.is_empty() { return Err(compilation_warnings); @@ -372,6 +405,7 @@ fn compile_contract_inner( /// Compile the current crate using `main_function` as the entrypoint. /// /// This function assumes [`check_crate`] is called beforehand. +#[tracing::instrument(level = "trace", skip_all, fields(function_name = context.function_name(&main_function)))] pub fn compile_no_check( context: &mut Context, options: &CompileOptions, @@ -391,6 +425,9 @@ pub fn compile_no_check( let hash = fxhash::hash64(&program); let hashes_match = cached_program.as_ref().map_or(false, |program| program.hash == hash); + if options.show_monomorphized { + println!("{program}"); + } // If user has specified that they want to see intermediate steps printed then we should // force compilation even if the program hasn't changed. 
@@ -398,6 +435,7 @@ pub fn compile_no_check( force_compile || options.print_acir || options.show_brillig || options.show_ssa; if !force_compile && hashes_match { + info!("Program matches existing artifact, returning early"); return Ok(cached_program.expect("cache must exist for hashes to match")); } let visibility = program.return_visibility; diff --git a/compiler/noirc_driver/tests/contracts.rs b/compiler/noirc_driver/tests/contracts.rs new file mode 100644 index 00000000000..c3041292352 --- /dev/null +++ b/compiler/noirc_driver/tests/contracts.rs @@ -0,0 +1,42 @@ +use std::path::Path; + +use fm::FileId; +use noirc_driver::{file_manager_with_stdlib, prepare_crate, CompileOptions, ErrorsAndWarnings}; +use noirc_errors::CustomDiagnostic; +use noirc_frontend::hir::{def_map::parse_file, Context}; + +#[test] +fn reject_crates_containing_multiple_contracts() -> Result<(), ErrorsAndWarnings> { + let source = " +contract Foo {} + +contract Bar {}"; + + let root = Path::new(""); + let file_name = Path::new("main.nr"); + let mut file_manager = file_manager_with_stdlib(root); + file_manager.add_file_with_source(file_name, source.to_owned()).expect( + "Adding source buffer to file manager should never fail when file manager is empty", + ); + let parsed_files = file_manager + .as_file_map() + .all_file_ids() + .map(|&file_id| (file_id, parse_file(&file_manager, file_id))) + .collect(); + + let mut context = Context::new(file_manager, parsed_files); + let root_crate_id = prepare_crate(&mut context, file_name); + + let errors = + noirc_driver::compile_contract(&mut context, root_crate_id, &CompileOptions::default()) + .unwrap_err(); + + assert_eq!( + errors, + vec![CustomDiagnostic::from_message("Packages are limited to a single contract") + .in_file(FileId::default())], + "stdlib is producing warnings" + ); + + Ok(()) +} diff --git a/compiler/noirc_driver/tests/stdlib_warnings.rs b/compiler/noirc_driver/tests/stdlib_warnings.rs new file mode 100644 index 
00000000000..6f437621123 --- /dev/null +++ b/compiler/noirc_driver/tests/stdlib_warnings.rs @@ -0,0 +1,32 @@ +use std::path::Path; + +use noirc_driver::{file_manager_with_stdlib, prepare_crate, ErrorsAndWarnings}; +use noirc_frontend::hir::{def_map::parse_file, Context}; + +#[test] +fn stdlib_does_not_produce_constant_warnings() -> Result<(), ErrorsAndWarnings> { + // We use a minimal source file so that if stdlib produces warnings then we can expect these warnings to _always_ + // be emitted. + let source = "fn main() {}"; + + let root = Path::new(""); + let file_name = Path::new("main.nr"); + let mut file_manager = file_manager_with_stdlib(root); + file_manager.add_file_with_source(file_name, source.to_owned()).expect( + "Adding source buffer to file manager should never fail when file manager is empty", + ); + let parsed_files = file_manager + .as_file_map() + .all_file_ids() + .map(|&file_id| (file_id, parse_file(&file_manager, file_id))) + .collect(); + + let mut context = Context::new(file_manager, parsed_files); + let root_crate_id = prepare_crate(&mut context, file_name); + + let ((), warnings) = noirc_driver::check_crate(&mut context, root_crate_id, false, false)?; + + assert_eq!(warnings, Vec::new(), "stdlib is producing warnings"); + + Ok(()) +} diff --git a/compiler/noirc_errors/Cargo.toml b/compiler/noirc_errors/Cargo.toml index 8e30c39be35..da18399971e 100644 --- a/compiler/noirc_errors/Cargo.toml +++ b/compiler/noirc_errors/Cargo.toml @@ -16,3 +16,7 @@ chumsky.workspace = true noirc_printable_type.workspace = true serde.workspace = true serde_with = "3.2.0" +tracing.workspace = true +flate2.workspace = true +serde_json.workspace = true +base64.workspace = true \ No newline at end of file diff --git a/compiler/noirc_errors/src/debug_info.rs b/compiler/noirc_errors/src/debug_info.rs index 30b4a7ce997..63e0a645dc9 100644 --- a/compiler/noirc_errors/src/debug_info.rs +++ b/compiler/noirc_errors/src/debug_info.rs @@ -2,14 +2,25 @@ use 
acvm::acir::circuit::OpcodeLocation; use acvm::compiler::AcirTransformationMap; use fm::FileId; +use base64::Engine; +use flate2::read::DeflateDecoder; +use flate2::write::DeflateEncoder; +use flate2::Compression; +use serde::Deserializer; +use serde::Serializer; use serde_with::serde_as; use serde_with::DisplayFromStr; -use std::collections::{BTreeMap, HashMap}; +use std::collections::BTreeMap; +use std::collections::HashMap; +use std::io::Read; +use std::io::Write; use std::mem; use crate::Location; use noirc_printable_type::PrintableType; -use serde::{Deserialize, Serialize}; +use serde::{ + de::Error as DeserializationError, ser::Error as SerializationError, Deserialize, Serialize, +}; pub type Variables = Vec<(u32, (String, u32))>; pub type Types = Vec<(u32, PrintableType)>; @@ -52,6 +63,7 @@ impl DebugInfo { /// The [`OpcodeLocation`]s are generated with the ACIR, but passing the ACIR through a transformation step /// renders the old `OpcodeLocation`s invalid. The AcirTransformationMap is able to map the old `OpcodeLocation` to the new ones. /// Note: One old `OpcodeLocation` might have transformed into more than one new `OpcodeLocation`. 
+ #[tracing::instrument(level = "trace", skip(self, update_map))] pub fn update_acir(&mut self, update_map: AcirTransformationMap) { let old_locations = mem::take(&mut self.locations); @@ -106,4 +118,40 @@ impl DebugInfo { .filter_map(|call_stack| call_stack.last().map(|location| location.file)) .collect() } + + pub fn serialize_compressed_base64_json( + debug_info: &DebugInfo, + s: S, + ) -> Result + where + S: Serializer, + { + let json_str = serde_json::to_string(debug_info).map_err(S::Error::custom)?; + + let mut encoder = DeflateEncoder::new(Vec::new(), Compression::default()); + encoder.write_all(json_str.as_bytes()).map_err(S::Error::custom)?; + let compressed_data = encoder.finish().map_err(S::Error::custom)?; + + let encoded_b64 = base64::prelude::BASE64_STANDARD.encode(compressed_data); + s.serialize_str(&encoded_b64) + } + + pub fn deserialize_compressed_base64_json<'de, D>( + deserializer: D, + ) -> Result + where + D: Deserializer<'de>, + { + let encoded_b64: String = Deserialize::deserialize(deserializer)?; + + let compressed_data = + base64::prelude::BASE64_STANDARD.decode(encoded_b64).map_err(D::Error::custom)?; + + let mut decoder = DeflateDecoder::new(&compressed_data[..]); + let mut decompressed_data = Vec::new(); + decoder.read_to_end(&mut decompressed_data).map_err(D::Error::custom)?; + + let json_str = String::from_utf8(decompressed_data).map_err(D::Error::custom)?; + serde_json::from_str(&json_str).map_err(D::Error::custom) + } } diff --git a/compiler/noirc_evaluator/Cargo.toml b/compiler/noirc_evaluator/Cargo.toml index 933ec2b300c..a8f0e8d83a9 100644 --- a/compiler/noirc_evaluator/Cargo.toml +++ b/compiler/noirc_evaluator/Cargo.toml @@ -17,3 +17,4 @@ thiserror.workspace = true num-bigint = "0.4" im = { version = "15.1", features = ["serde"] } serde.workspace = true +tracing.workspace = true \ No newline at end of file diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs 
b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs index b71033e0206..7a88ea52688 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_black_box.rs @@ -44,6 +44,19 @@ pub(crate) fn convert_black_box_call( unreachable!("ICE: Blake2s expects one array argument and one array result") } } + BlackBoxFunc::Blake3 => { + if let ([message], [BrilligVariable::BrilligArray(result_array)]) = + (function_arguments, function_results) + { + let message_vector = convert_array_or_vector(brillig_context, message, bb_func); + brillig_context.black_box_op_instruction(BlackBoxOp::Blake3 { + message: message_vector.to_heap_vector(vec![HeapValueType::Simple]), + output: result_array.to_heap_array(vec![HeapValueType::Simple]), + }); + } else { + unreachable!("ICE: Blake3 expects one array argument and one array result") + } + } BlackBoxFunc::Keccak256 => { if let ( [message, BrilligVariable::Simple(array_size)], @@ -61,17 +74,18 @@ pub(crate) fn convert_black_box_call( unreachable!("ICE: Keccak256 expects message, message size and result array") } } - BlackBoxFunc::HashToField128Security => { - if let ([message], [BrilligVariable::Simple(result_register)]) = + BlackBoxFunc::Keccakf1600 => { + if let ([message], [BrilligVariable::BrilligArray(result_array)]) = (function_arguments, function_results) { - let message_vector = convert_array_or_vector(brillig_context, message, bb_func); - brillig_context.black_box_op_instruction(BlackBoxOp::HashToField128Security { - message: message_vector.to_heap_vector(vec![HeapValueType::Simple]), - output: *result_register, + let state_vector = convert_array_or_vector(brillig_context, message, bb_func); + + brillig_context.black_box_op_instruction(BlackBoxOp::Keccakf1600 { + message: state_vector.to_heap_vector(vec![HeapValueType::Simple]), + output: result_array.to_heap_array(vec![HeapValueType::Simple]), }); } else { - unreachable!("ICE: 
HashToField128Security expects one array argument and one register result") + unreachable!("ICE: Keccakf1600 expects one array argument and one array result") } } BlackBoxFunc::EcdsaSecp256k1 => { @@ -116,6 +130,7 @@ pub(crate) fn convert_black_box_call( ) } } + BlackBoxFunc::PedersenCommitment => { if let ( [message, BrilligVariable::Simple(domain_separator)], @@ -184,7 +199,54 @@ pub(crate) fn convert_black_box_call( ) } } - _ => unimplemented!("ICE: Black box function {:?} is not implemented", bb_func), + BlackBoxFunc::EmbeddedCurveAdd => { + if let ( + [BrilligVariable::Simple(input1_x), BrilligVariable::Simple(input1_y), BrilligVariable::Simple(input2_x), BrilligVariable::Simple(input2_y)], + [BrilligVariable::BrilligArray(result_array)], + ) = (function_arguments, function_results) + { + brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveAdd { + input1_x: *input1_x, + input1_y: *input1_y, + input2_x: *input2_x, + input2_y: *input2_y, + result: result_array.to_heap_array(vec![HeapValueType::Simple]), + }); + } else { + unreachable!( + "ICE: EmbeddedCurveAdd expects four register arguments and one array result" + ) + } + } + BlackBoxFunc::EmbeddedCurveDouble => { + if let ( + [BrilligVariable::Simple(input1_x), BrilligVariable::Simple(input1_y)], + [BrilligVariable::BrilligArray(result_array)], + ) = (function_arguments, function_results) + { + brillig_context.black_box_op_instruction(BlackBoxOp::EmbeddedCurveDouble { + input1_x: *input1_x, + input1_y: *input1_y, + result: result_array.to_heap_array(vec![HeapValueType::Simple]), + }); + } else { + unreachable!( + "ICE: EmbeddedCurveAdd expects two register arguments and one array result" + ) + } + } + BlackBoxFunc::AND => { + unreachable!("ICE: `BlackBoxFunc::AND` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::XOR => { + unreachable!("ICE: `BlackBoxFunc::XOR` calls should be transformed into a `BinaryOp`") + } + BlackBoxFunc::RANGE => unreachable!( + "ICE: 
`BlackBoxFunc::RANGE` calls should be transformed into a `Instruction::Cast`" + ), + BlackBoxFunc::RecursiveAggregation => unimplemented!( + "ICE: `BlackBoxFunc::RecursiveAggregation` is not implemented by the Brillig VM" + ), } } diff --git a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs index 400c30f73dd..37c03ddbb97 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_gen/brillig_block.rs @@ -524,7 +524,7 @@ impl<'block> BrilligBlock<'block> { unreachable!("unsupported function call type {:?}", dfg[*func]) } }, - Instruction::Truncate { value, .. } => { + Instruction::Truncate { value, bit_size, .. } => { let result_ids = dfg.instruction_results(instruction_id); let destination_register = self.variables.define_register_variable( self.function_context, @@ -533,9 +533,13 @@ impl<'block> BrilligBlock<'block> { dfg, ); let source_register = self.convert_ssa_register_value(*value, dfg); - self.brillig_context.truncate_instruction(destination_register, source_register); + self.brillig_context.truncate_instruction( + destination_register, + source_register, + *bit_size, + ); } - Instruction::Cast(value, target_type) => { + Instruction::Cast(value, _) => { let result_ids = dfg.instruction_results(instruction_id); let destination_register = self.variables.define_register_variable( self.function_context, @@ -544,12 +548,7 @@ impl<'block> BrilligBlock<'block> { dfg, ); let source_register = self.convert_ssa_register_value(*value, dfg); - self.convert_cast( - destination_register, - source_register, - target_type, - &dfg.type_of_value(*value), - ); + self.convert_cast(destination_register, source_register); } Instruction::ArrayGet { array, index } => { let result_ids = dfg.instruction_results(instruction_id); @@ -1095,43 +1094,11 @@ impl<'block> BrilligBlock<'block> { /// Converts an SSA cast to a sequence of 
Brillig opcodes. /// Casting is only necessary when shrinking the bit size of a numeric value. - fn convert_cast( - &mut self, - destination: RegisterIndex, - source: RegisterIndex, - target_type: &Type, - source_type: &Type, - ) { - fn numeric_to_bit_size(typ: &NumericType) -> u32 { - match typ { - NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => *bit_size, - NumericType::NativeField => FieldElement::max_num_bits(), - } - } - // Casting is only valid for numeric types - // This should be checked by the frontend, so we panic if this is the case - let (source_numeric_type, target_numeric_type) = match (source_type, target_type) { - (Type::Numeric(source_numeric_type), Type::Numeric(target_numeric_type)) => { - (source_numeric_type, target_numeric_type) - } - _ => unimplemented!("The cast operation is only valid for integers."), - }; - let source_bit_size = numeric_to_bit_size(source_numeric_type); - let target_bit_size = numeric_to_bit_size(target_numeric_type); - // Casting from a larger bit size to a smaller bit size (narrowing cast) - // requires a cast instruction. - // If its a widening cast, ie casting from a smaller bit size to a larger bit size - // we simply put a mov instruction as a no-op - // - // Field elements by construction always have the largest bit size - // This means that casting to a Field element, will always be a widening cast - // and therefore a no-op. Conversely, casting from a Field element - // will always be a narrowing cast and therefore a cast instruction - if source_bit_size > target_bit_size { - self.brillig_context.cast_instruction(destination, source, target_bit_size); - } else { - self.brillig_context.mov_instruction(destination, source); - } + fn convert_cast(&mut self, destination: RegisterIndex, source: RegisterIndex) { + // We assume that `source` is a valid `target_type` as it's expected that a truncate instruction was emitted + // to ensure this is the case. 
+ + self.brillig_context.mov_instruction(destination, source); } /// Converts the Binary instruction into a sequence of Brillig opcodes. diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs index 7fc097fc307..d1fc7b59e11 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir.rs @@ -687,10 +687,29 @@ impl BrilligContext { &mut self, destination_of_truncated_value: RegisterIndex, value_to_truncate: RegisterIndex, + bit_size: u32, ) { - // Effectively a no-op because brillig already has implicit truncation on integer - // operations. We need only copy the value to it's destination. - self.mov_instruction(destination_of_truncated_value, value_to_truncate); + self.debug_show.truncate_instruction( + destination_of_truncated_value, + value_to_truncate, + bit_size, + ); + assert!( + bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, + "tried to truncate to a bit size greater than allowed {bit_size}" + ); + + // The brillig VM performs all arithmetic operations modulo 2**bit_size + // So to truncate any value to a target bit size we can just issue a no-op arithmetic operation + // With bit size equal to target_bit_size + let zero_register = self.make_constant(Value::from(FieldElement::zero())); + self.binary_instruction( + value_to_truncate, + zero_register, + destination_of_truncated_value, + BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size }, + ); + self.deallocate_register(zero_register); } /// Emits a stop instruction @@ -761,36 +780,6 @@ impl BrilligContext { self.deallocate_register(scratch_register_j); } - /// Emits a modulo instruction against 2**target_bit_size - /// - /// Integer arithmetic in Brillig is currently constrained to 127 bit integers. - /// We restrict the cast operation, so that integer types over 127 bits - /// cannot be created. 
- pub(crate) fn cast_instruction( - &mut self, - destination: RegisterIndex, - source: RegisterIndex, - target_bit_size: u32, - ) { - self.debug_show.cast_instruction(destination, source, target_bit_size); - assert!( - target_bit_size <= BRILLIG_INTEGER_ARITHMETIC_BIT_SIZE, - "tried to cast to a bit size greater than allowed {target_bit_size}" - ); - - // The brillig VM performs all arithmetic operations modulo 2**bit_size - // So to cast any value to a target bit size we can just issue a no-op arithmetic operation - // With bit size equal to target_bit_size - let zero_register = self.make_constant(Value::from(FieldElement::zero())); - self.binary_instruction( - source, - zero_register, - destination, - BrilligBinaryOp::Integer { op: BinaryIntOp::Add, bit_size: target_bit_size }, - ); - self.deallocate_register(zero_register); - } - /// Adds a unresolved external `Call` instruction to the bytecode. /// This calls into another function compiled into this brillig artifact. pub(crate) fn add_external_call_instruction(&mut self, func_label: T) { @@ -1056,6 +1045,7 @@ pub(crate) mod tests { BinaryIntOp, ForeignCallParam, ForeignCallResult, HeapVector, RegisterIndex, RegisterOrMemory, Value, }; + use acvm::brillig_vm::brillig::HeapValueType; use acvm::brillig_vm::{Registers, VMStatus, VM}; use acvm::{BlackBoxFunctionSolver, BlackBoxResolutionError, FieldElement}; @@ -1097,6 +1087,24 @@ pub(crate) mod tests { ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { Ok((4_u128.into(), 5_u128.into())) } + + fn ec_add( + &self, + _input1_x: &FieldElement, + _input1_y: &FieldElement, + _input2_x: &FieldElement, + _input2_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + panic!("Path not trodden by this test") + } + + fn ec_double( + &self, + _input_x: &FieldElement, + _input_y: &FieldElement, + ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { + panic!("Path not trodden by this test") + } } pub(crate) fn 
create_context() -> BrilligContext { @@ -1162,7 +1170,11 @@ pub(crate) mod tests { context.foreign_call_instruction( "make_number_sequence".into(), &[RegisterOrMemory::RegisterIndex(r_input_size)], - &[RegisterOrMemory::HeapVector(HeapVector { pointer: r_stack, size: r_output_size })], + &[RegisterOrMemory::HeapVector(HeapVector { + pointer: r_stack, + size: r_output_size, + value_types: vec![HeapValueType::Simple], + })], ); // push stack frame by r_returned_size context.memory_op(r_stack, r_output_size, r_stack, BinaryIntOp::Add); diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs index c77ec12ca49..caa64bd3b82 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/debug_show.rs @@ -326,7 +326,7 @@ impl DebugShow { } /// Debug function for cast_instruction - pub(crate) fn cast_instruction( + pub(crate) fn truncate_instruction( &self, destination: RegisterIndex, source: RegisterIndex, @@ -334,7 +334,7 @@ impl DebugShow { ) { debug_println!( self.enable_debug_trace, - " CAST {} FROM {} TO {} BITS", + " TRUNCATE {} FROM {} TO {} BITS", destination, source, target_bit_size @@ -350,16 +350,14 @@ impl DebugShow { BlackBoxOp::Keccak256 { message, output } => { debug_println!(self.enable_debug_trace, " KECCAK256 {} -> {}", message, output); } + BlackBoxOp::Keccakf1600 { message, output } => { + debug_println!(self.enable_debug_trace, " KECCAKF1600 {} -> {}", message, output); + } BlackBoxOp::Blake2s { message, output } => { debug_println!(self.enable_debug_trace, " BLAKE2S {} -> {}", message, output); } - BlackBoxOp::HashToField128Security { message, output } => { - debug_println!( - self.enable_debug_trace, - " HASH_TO_FIELD_128_SECURITY {} -> {}", - message, - output - ); + BlackBoxOp::Blake3 { message, output } => { + debug_println!(self.enable_debug_trace, " BLAKE3 {} -> {}", message, output); } 
BlackBoxOp::EcdsaSecp256k1 { hashed_msg, @@ -404,6 +402,26 @@ impl DebugShow { result ); } + BlackBoxOp::EmbeddedCurveAdd { input1_x, input1_y, input2_x, input2_y, result } => { + debug_println!( + self.enable_debug_trace, + " EMBEDDED_CURVE_ADD ({} {}) ({} {}) -> {}", + input1_x, + input1_y, + input2_x, + input2_y, + result + ); + } + BlackBoxOp::EmbeddedCurveDouble { input1_x, input1_y, result } => { + debug_println!( + self.enable_debug_trace, + " EMBEDDED_CURVE_DOUBLE ({} {}) -> {}", + input1_x, + input1_y, + result + ); + } BlackBoxOp::PedersenCommitment { inputs, domain_separator, output } => { debug_println!( self.enable_debug_trace, diff --git a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs index 48615988238..36ca414f38e 100644 --- a/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs +++ b/compiler/noirc_evaluator/src/brillig/brillig_ir/entry_point.rs @@ -209,7 +209,7 @@ impl BrilligContext { /// Adds the instructions needed to handle return parameters /// The runtime expects the results in the first `n` registers. /// Arrays are expected to be returned as pointers to the first element with all the nested arrays flattened. - /// However, the function called returns variables (that have extra data) and the returned arrays are unflattened. + /// However, the function called returns variables (that have extra data) and the returned arrays are deflattened. fn exit_point_instruction(&mut self, return_parameters: Vec) { // First, we allocate the registers that hold the returned variables from the function call. self.set_allocated_registers(vec![]); @@ -227,7 +227,7 @@ impl BrilligContext { BrilligParameter::Slice(..) 
=> unreachable!("ICE: Cannot return slices"), }) .collect(); - // Now, we unflatten the returned arrays + // Now, we deflatten the returned arrays for (return_param, returned_variable) in return_parameters.iter().zip(&returned_variables) { if let BrilligParameter::Array(item_type, item_count) = return_param { if item_type.iter().any(|item| !matches!(item, BrilligParameter::Simple)) { diff --git a/compiler/noirc_evaluator/src/errors.rs b/compiler/noirc_evaluator/src/errors.rs index 42818e8b19d..2582c48555a 100644 --- a/compiler/noirc_evaluator/src/errors.rs +++ b/compiler/noirc_evaluator/src/errors.rs @@ -42,6 +42,8 @@ pub enum RuntimeError { UnknownLoopBound { call_stack: CallStack }, #[error("Argument is not constant")] AssertConstantFailed { call_stack: CallStack }, + #[error("Nested slices are not supported")] + NestedSlice { call_stack: CallStack }, } // We avoid showing the actual lhs and rhs since most of the time they are just 0 @@ -66,7 +68,7 @@ impl From for FileDiagnostic { let message = warning.to_string(); let (secondary_message, call_stack) = match warning { InternalWarning::ReturnConstant { call_stack } => { - ("constant value".to_string(), call_stack) + ("This variable contains a value which is constrained to be a constant. Consider removing this value as additional return values increase proving/verification time".to_string(), call_stack) }, InternalWarning::VerifyProof { call_stack } => { ("verify_proof(...) aggregates data for the verifier, the actual verification will be done when the full proof is verified using nargo verify. 
nargo prove may generate an invalid proof if bad data is used as input to verify_proof".to_string(), call_stack) @@ -85,7 +87,7 @@ impl From for FileDiagnostic { #[derive(Debug, PartialEq, Eq, Clone, Error, Serialize, Deserialize)] pub enum InternalWarning { - #[error("Returning a constant value is not allowed")] + #[error("Return variable contains a constant value")] ReturnConstant { call_stack: CallStack }, #[error("Calling std::verify_proof(...) does not verify a proof")] VerifyProof { call_stack: CallStack }, @@ -106,7 +108,7 @@ pub enum InternalError { #[error("ICE: Undeclared AcirVar")] UndeclaredAcirVar { call_stack: CallStack }, #[error("ICE: Expected {expected:?}, found {found:?}")] - UnExpected { expected: String, found: String, call_stack: CallStack }, + Unexpected { expected: String, found: String, call_stack: CallStack }, } impl RuntimeError { @@ -119,7 +121,7 @@ impl RuntimeError { | InternalError::MissingArg { call_stack, .. } | InternalError::NotAConstant { call_stack, .. } | InternalError::UndeclaredAcirVar { call_stack } - | InternalError::UnExpected { call_stack, .. }, + | InternalError::Unexpected { call_stack, .. }, ) | RuntimeError::FailedConstraint { call_stack, .. } | RuntimeError::IndexOutOfBounds { call_stack, .. } @@ -129,7 +131,8 @@ impl RuntimeError { | RuntimeError::UnknownLoopBound { call_stack } | RuntimeError::AssertConstantFailed { call_stack } | RuntimeError::IntegerOutOfBounds { call_stack, .. } - | RuntimeError::UnsupportedIntegerSize { call_stack, .. } => call_stack, + | RuntimeError::UnsupportedIntegerSize { call_stack, .. } + | RuntimeError::NestedSlice { call_stack, .. 
} => call_stack, } } } diff --git a/compiler/noirc_evaluator/src/ssa.rs b/compiler/noirc_evaluator/src/ssa.rs index 9a6e6db55a7..1d746365c51 100644 --- a/compiler/noirc_evaluator/src/ssa.rs +++ b/compiler/noirc_evaluator/src/ssa.rs @@ -23,6 +23,7 @@ use noirc_errors::debug_info::DebugInfo; use noirc_frontend::{ hir_def::function::FunctionSignature, monomorphization::ast::Program, Visibility, }; +use tracing::{span, Level}; use self::{acir_gen::GeneratedAcir, ssa_gen::Ssa}; @@ -43,7 +44,9 @@ pub(crate) fn optimize_into_acir( ) -> Result { let abi_distinctness = program.return_distinctness; - let ssa_builder = SsaBuilder::new(program, print_ssa_passes, force_brillig_output)? + let ssa_gen_span = span!(Level::TRACE, "ssa_generation"); + let ssa_gen_span_guard = ssa_gen_span.enter(); + let ssa = SsaBuilder::new(program, print_ssa_passes, force_brillig_output)? .run_pass(Ssa::defunctionalize, "After Defunctionalization:") .run_pass(Ssa::inline_functions, "After Inlining:") // Run mem2reg with the CFG separated into blocks @@ -60,18 +63,16 @@ pub(crate) fn optimize_into_acir( // Run mem2reg once more with the flattened CFG to catch any remaining loads/stores .run_pass(Ssa::mem2reg, "After Mem2Reg:") .run_pass(Ssa::fold_constants, "After Constant Folding:") - .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:"); + .run_pass(Ssa::dead_instruction_elimination, "After Dead Instruction Elimination:") + .run_pass(Ssa::bubble_up_constrains, "After Constraint Bubbling:") + .finish(); - let brillig = ssa_builder.to_brillig(print_brillig_trace); + let brillig = ssa.to_brillig(print_brillig_trace); - // Split off any passes the are not necessary for Brillig generation but are necessary for ACIR generation. - // We only need to fill out nested slices as we need to have a known length when dealing with memory operations - // in ACIR gen while this is not necessary in the Brillig IR. 
- let ssa = ssa_builder - .run_pass(Ssa::fill_internal_slices, "After Fill Internal Slice Dummy Data:") - .finish(); + drop(ssa_gen_span_guard); let last_array_uses = ssa.find_last_array_uses(); + ssa.into_acir(brillig, abi_distinctness, &last_array_uses) } @@ -79,6 +80,7 @@ pub(crate) fn optimize_into_acir( /// /// The output ACIR is is backend-agnostic and so must go through a transformation pass before usage in proof generation. #[allow(clippy::type_complexity)] +#[tracing::instrument(level = "trace", skip_all)] pub fn create_circuit( program: Program, enable_ssa_logging: bool, @@ -94,8 +96,8 @@ pub fn create_circuit( force_brillig_output, )?; let opcodes = generated_acir.take_opcodes(); + let current_witness_index = generated_acir.current_witness_index().0; let GeneratedAcir { - current_witness_index, return_witnesses, locations, input_witnesses, diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs index 712913841f3..cf7c6151110 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/acir_variable.rs @@ -9,6 +9,7 @@ use crate::ssa::ir::{instruction::Endian, types::NumericType}; use acvm::acir::circuit::brillig::{BrilligInputs, BrilligOutputs}; use acvm::acir::circuit::opcodes::{BlockId, MemOp}; use acvm::acir::circuit::Opcode; +use acvm::blackbox_solver; use acvm::brillig_vm::{brillig::Value, Registers, VMStatus, VM}; use acvm::{ acir::{ @@ -19,7 +20,6 @@ use acvm::{ }, FieldElement, }; -use acvm::{BlackBoxFunctionSolver, BlackBoxResolutionError}; use fxhash::FxHashMap as HashMap; use iter_extended::{try_vecmap, vecmap}; use num_bigint::BigUint; @@ -135,6 +135,13 @@ impl AcirContext { self.add_data(constant_data) } + /// Returns the constant represented by the given variable. + /// + /// Panics: if the variable does not represent a constant. 
+ pub(crate) fn constant(&self, var: AcirVar) -> FieldElement { + self.vars[&var].as_constant().expect("ICE - expected the variable to be a constant value") + } + /// Adds a Variable to the context, whose exact value is resolved at /// runtime. pub(crate) fn add_variable(&mut self) -> AcirVar { @@ -307,6 +314,7 @@ impl AcirContext { inverse_code, vec![AcirValue::Var(var, AcirType::field())], vec![AcirType::field()], + true, )?; let inverted_var = Self::expect_one_var(results); @@ -368,9 +376,34 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { - let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; - Ok(outputs[0]) + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x ^ x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } else if lhs_expr.is_zero() { + // 0 ^ x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x ^ 0 == x + return Ok(lhs); + } + + let bit_size = typ.bit_size(); + if bit_size == 1 { + // Operands are booleans. + // + // a ^ b == a + b - 2*a*b + let sum = self.add_var(lhs, rhs)?; + let prod = self.mul_var(lhs, rhs)?; + self.add_mul_var(sum, -FieldElement::from(2_i128), prod) + } else { + let inputs = vec![AcirValue::Var(lhs, typ.clone()), AcirValue::Var(rhs, typ)]; + let outputs = self.black_box_function(BlackBoxFunc::XOR, inputs, 1)?; + Ok(outputs[0]) + } } /// Returns an `AcirVar` that is the AND result of `lhs` & `rhs`. 
@@ -380,6 +413,18 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + + if lhs_expr == rhs_expr { + // x & x == x + return Ok(lhs); + } else if lhs_expr.is_zero() || rhs_expr.is_zero() { + // x & 0 == 0 and 0 & x == 0 + let zero = self.add_constant(FieldElement::zero()); + return Ok(zero); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans. @@ -398,6 +443,16 @@ impl AcirContext { rhs: AcirVar, typ: AcirType, ) -> Result { + let lhs_expr = self.var_to_expression(lhs)?; + let rhs_expr = self.var_to_expression(rhs)?; + if lhs_expr.is_zero() { + // 0 | x == x + return Ok(rhs); + } else if rhs_expr.is_zero() { + // x | 0 == x + return Ok(lhs); + } + let bit_size = typ.bit_size(); if bit_size == 1 { // Operands are booleans @@ -407,15 +462,11 @@ impl AcirContext { self.sub_var(sum, mul) } else { // Implement OR in terms of AND - // max - ((max - a) AND (max -b)) - // Subtracting from max flips the bits, so this is effectively: - // (NOT a) NAND (NOT b) - let max = self.add_constant((1_u128 << bit_size) - 1); - let a = self.sub_var(max, lhs)?; - let b = self.sub_var(max, rhs)?; - let inputs = vec![AcirValue::Var(a, typ.clone()), AcirValue::Var(b, typ)]; - let outputs = self.black_box_function(BlackBoxFunc::AND, inputs, 1)?; - self.sub_var(max, outputs[0]) + // (NOT a) NAND (NOT b) => a OR b + let a = self.not_var(lhs, typ.clone())?; + let b = self.not_var(rhs, typ.clone())?; + let a_and_b = self.and_var(a, b, typ.clone())?; + self.not_var(a_and_b, typ) } } @@ -487,8 +538,19 @@ impl AcirContext { let lhs_data = self.vars[&lhs].clone(); let rhs_data = self.vars[&rhs].clone(); let result = match (lhs_data, rhs_data) { + // (x * 1) == (1 * x) == x + (AcirVarData::Const(constant), _) if constant.is_one() => rhs, + (_, AcirVarData::Const(constant)) if constant.is_one() => lhs, + + // (x * 0) == (0 * x) == 0 + (AcirVarData::Const(constant), 
_) | (_, AcirVarData::Const(constant)) + if constant.is_zero() => + { + self.add_constant(FieldElement::zero()) + } + (AcirVarData::Const(lhs_constant), AcirVarData::Const(rhs_constant)) => { - self.add_data(AcirVarData::Const(lhs_constant * rhs_constant)) + self.add_constant(lhs_constant * rhs_constant) } (AcirVarData::Witness(witness), AcirVarData::Const(constant)) | (AcirVarData::Const(constant), AcirVarData::Witness(witness)) => { @@ -654,6 +716,7 @@ impl AcirContext { AcirValue::Var(rhs, AcirType::unsigned(bit_size)), ], vec![AcirType::unsigned(max_q_bits), AcirType::unsigned(max_rhs_bits)], + true, )? .try_into() .expect("quotient only returns two values"); @@ -852,9 +915,7 @@ impl AcirContext { // Unsigned to signed: derive q and r from q1,r1 and the signs of lhs and rhs // Quotient sign is lhs sign * rhs sign, whose resulting sign bit is the XOR of the sign bits - let sign_sum = self.add_var(lhs_leading, rhs_leading)?; - let sign_prod = self.mul_var(lhs_leading, rhs_leading)?; - let q_sign = self.add_mul_var(sign_sum, -FieldElement::from(2_i128), sign_prod)?; + let q_sign = self.xor_var(lhs_leading, rhs_leading, AcirType::unsigned(1))?; let quotient = self.two_complement(q1, q_sign, bit_size)?; let remainder = self.two_complement(r1, lhs_leading, bit_size)?; @@ -936,6 +997,58 @@ impl AcirContext { Ok(remainder) } + /// Returns an 'AcirVar' containing the boolean value lhs diff<2^n, because the 2-complement representation keeps the ordering (e.g in 8 bits -1 is 255 > -2 = 254) + /// If not, lhs positive => diff > 2^n + /// and lhs negative => diff <= 2^n => diff < 2^n (because signs are not the same, so lhs != rhs and so diff != 2^n) + pub(crate) fn less_than_signed( + &mut self, + lhs: AcirVar, + rhs: AcirVar, + bit_count: u32, + ) -> Result { + let pow_last = self.add_constant(FieldElement::from(1_u128 << (bit_count - 1))); + let pow = self.add_constant(FieldElement::from(1_u128 << (bit_count))); + + // We check whether the inputs have same sign or not by 
computing the XOR of their bit sign + + // Predicate is always active as `pow_last` is known to be non-zero. + let one = self.add_constant(1_u128); + let lhs_sign = self.div_var( + lhs, + pow_last, + AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), + one, + )?; + let rhs_sign = self.div_var( + rhs, + pow_last, + AcirType::NumericType(NumericType::Unsigned { bit_size: bit_count }), + one, + )?; + let same_sign = self.xor_var( + lhs_sign, + rhs_sign, + AcirType::NumericType(NumericType::Signed { bit_size: 1 }), + )?; + + // We compute the input difference + let no_underflow = self.add_var(lhs, pow)?; + let diff = self.sub_var(no_underflow, rhs)?; + + // We check the 'bit sign' of the difference + let diff_sign = self.less_than_var(diff, pow, bit_count + 1)?; + + // Then the result is simply diff_sign XOR same_sign (can be checked with a truth table) + self.xor_var( + diff_sign, + same_sign, + AcirType::NumericType(NumericType::Signed { bit_size: 1 }), + ) + } + /// Returns an `AcirVar` which will be `1` if lhs >= rhs /// and `0` otherwise. pub(crate) fn more_than_eq_var( @@ -943,7 +1056,6 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, max_bits: u32, - predicate: AcirVar, ) -> Result { // Returns a `Witness` that is constrained to be: // - `1` if lhs >= rhs @@ -968,6 +1080,7 @@ impl AcirContext { // // TODO: perhaps this should be a user error, instead of an assert assert!(max_bits + 1 < FieldElement::max_num_bits()); + let two_max_bits = self .add_constant(FieldElement::from(2_i128).pow(&FieldElement::from(max_bits as i128))); let diff = self.sub_var(lhs, rhs)?; @@ -997,13 +1110,11 @@ impl AcirContext { // let k = b - a // - 2^{max_bits} - k == q * 2^{max_bits} + r // - This is only the case when q == 0 and r == 2^{max_bits} - k - // - let (q, _) = self.euclidean_division_var( - comparison_evaluation, - two_max_bits, - max_bits + 1, - predicate, - )?; + + // Predicate is always active as we know `two_max_bits` is always non-zero. 
+ let one = self.add_constant(1_u128); + let (q, _) = + self.euclidean_division_var(comparison_evaluation, two_max_bits, max_bits + 1, one)?; Ok(q) } @@ -1014,11 +1125,10 @@ impl AcirContext { lhs: AcirVar, rhs: AcirVar, bit_size: u32, - predicate: AcirVar, ) -> Result { // Flip the result of calling more than equal method to // compute less than. - let comparison = self.more_than_eq_var(lhs, rhs, bit_size, predicate)?; + let comparison = self.more_than_eq_var(lhs, rhs, bit_size)?; let one = self.add_constant(FieldElement::one()); self.sub_var(one, comparison) // comparison_negated @@ -1209,6 +1319,7 @@ impl AcirContext { generated_brillig: GeneratedBrillig, inputs: Vec, outputs: Vec, + attempt_execution: bool, ) -> Result, InternalError> { let b_inputs = try_vecmap(inputs, |i| match i { AcirValue::Var(var, _) => Ok(BrilligInputs::Single(self.var_to_expression(var)?)), @@ -1228,10 +1339,15 @@ impl AcirContext { // Optimistically try executing the brillig now, if we can complete execution they just return the results. // This is a temporary measure pending SSA optimizations being applied to Brillig which would remove constant-input opcodes (See #2066) - if let Some(brillig_outputs) = - self.execute_brillig(&generated_brillig.byte_code, &b_inputs, &outputs) - { - return Ok(brillig_outputs); + // + // We do _not_ want to do this in the situation where the `main` function is unconstrained, as if execution succeeds + // the entire program will be replaced with witness constraints to its outputs. + if attempt_execution { + if let Some(brillig_outputs) = + self.execute_brillig(&generated_brillig.byte_code, &b_inputs, &outputs) + { + return Ok(brillig_outputs); + } } // Otherwise we must generate ACIR for it and execute at runtime. 
@@ -1414,7 +1530,7 @@ impl AcirContext { bit_size: u32, predicate: AcirVar, ) -> Result<(), RuntimeError> { - let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size, predicate)?; + let lhs_less_than_rhs = self.more_than_eq_var(rhs, lhs, bit_size)?; self.maybe_eq_predicate(lhs_less_than_rhs, predicate) } @@ -1595,41 +1711,6 @@ fn execute_brillig( code: &[BrilligOpcode], inputs: &[BrilligInputs], ) -> Option<(Registers, Vec)> { - struct NullBbSolver; - - impl BlackBoxFunctionSolver for NullBbSolver { - fn schnorr_verify( - &self, - _public_key_x: &FieldElement, - _public_key_y: &FieldElement, - _signature: &[u8], - _message: &[u8], - ) -> Result { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::SchnorrVerify)) - } - fn pedersen_commitment( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::PedersenCommitment)) - } - fn pedersen_hash( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::PedersenHash)) - } - fn fixed_base_scalar_mul( - &self, - _low: &FieldElement, - _high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), BlackBoxResolutionError> { - Err(BlackBoxResolutionError::Unsupported(BlackBoxFunc::FixedBaseScalarMul)) - } - } - // Set input values let mut input_register_values: Vec = Vec::with_capacity(inputs.len()); let mut input_memory: Vec = Vec::new(); @@ -1655,7 +1736,13 @@ fn execute_brillig( // Instantiate a Brillig VM given the solved input registers and memory, along with the Brillig bytecode. 
let input_registers = Registers::load(input_register_values); - let mut vm = VM::new(input_registers, input_memory, code, Vec::new(), &NullBbSolver); + let mut vm = VM::new( + input_registers, + input_memory, + code, + Vec::new(), + &blackbox_solver::StubbedBlackBoxSolver, + ); // Run the Brillig VM on these inputs, bytecode, etc! let vm_status = vm.process_opcodes(); diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs index f29d3c9ec05..efc64c5286e 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/acir_ir/generated_acir.rs @@ -28,9 +28,12 @@ use num_bigint::BigUint; /// The output of the Acir-gen pass pub(crate) struct GeneratedAcir { /// The next witness index that may be declared. + /// If witness index is `None` then we have not yet created a witness + /// and thus next witness index that be declared is zero. + /// This field is private should only ever be accessed through its getter and setter. /// /// Equivalent to acvm::acir::circuit::Circuit's field of the same name. - pub(crate) current_witness_index: u32, + current_witness_index: Option, /// The opcodes of which the compiled ACIR will comprise. opcodes: Vec, @@ -60,7 +63,7 @@ pub(crate) struct GeneratedAcir { impl GeneratedAcir { /// Returns the current witness index. pub(crate) fn current_witness_index(&self) -> Witness { - Witness(self.current_witness_index) + Witness(self.current_witness_index.unwrap_or(0)) } /// Adds a new opcode into ACIR. @@ -78,8 +81,12 @@ impl GeneratedAcir { /// Updates the witness index counter and returns /// the next witness index. 
pub(crate) fn next_witness_index(&mut self) -> Witness { - self.current_witness_index += 1; - Witness(self.current_witness_index) + if let Some(current_index) = self.current_witness_index { + self.current_witness_index.replace(current_index + 1); + } else { + self.current_witness_index = Some(0); + } + Witness(self.current_witness_index.expect("ICE: current_witness_index should exist")) } /// Converts [`Expression`] `expr` into a [`Witness`]. @@ -155,10 +162,7 @@ impl GeneratedAcir { BlackBoxFunc::Blake2s => { BlackBoxFuncCall::Blake2s { inputs: inputs[0].clone(), outputs } } - BlackBoxFunc::HashToField128Security => BlackBoxFuncCall::HashToField128Security { - inputs: inputs[0].clone(), - output: outputs[0], - }, + BlackBoxFunc::Blake3 => BlackBoxFuncCall::Blake3 { inputs: inputs[0].clone(), outputs }, BlackBoxFunc::SchnorrVerify => { BlackBoxFuncCall::SchnorrVerify { public_key_x: inputs[0][0], @@ -208,6 +212,18 @@ impl GeneratedAcir { high: inputs[1][0], outputs: (outputs[0], outputs[1]), }, + BlackBoxFunc::EmbeddedCurveAdd => BlackBoxFuncCall::EmbeddedCurveAdd { + input1_x: inputs[0][0], + input1_y: inputs[1][0], + input2_x: inputs[2][0], + input2_y: inputs[3][0], + outputs: (outputs[0], outputs[1]), + }, + BlackBoxFunc::EmbeddedCurveDouble => BlackBoxFuncCall::EmbeddedCurveDouble { + input_x: inputs[0][0], + input_y: inputs[1][0], + outputs: (outputs[0], outputs[1]), + }, BlackBoxFunc::Keccak256 => { let var_message_size = match inputs.to_vec().pop() { Some(var_message_size) => var_message_size[0], @@ -226,26 +242,15 @@ impl GeneratedAcir { outputs, } } - BlackBoxFunc::RecursiveAggregation => { - let has_previous_aggregation = self.opcodes.iter().any(|op| { - matches!( - op, - AcirOpcode::BlackBoxFuncCall(BlackBoxFuncCall::RecursiveAggregation { .. 
}) - ) - }); - - let input_aggregation_object = - if !has_previous_aggregation { None } else { Some(inputs[4].clone()) }; - - BlackBoxFuncCall::RecursiveAggregation { - verification_key: inputs[0].clone(), - proof: inputs[1].clone(), - public_inputs: inputs[2].clone(), - key_hash: inputs[3][0], - input_aggregation_object, - output_aggregation_object: outputs, - } + BlackBoxFunc::Keccakf1600 => { + BlackBoxFuncCall::Keccakf1600 { inputs: inputs[0].clone(), outputs } } + BlackBoxFunc::RecursiveAggregation => BlackBoxFuncCall::RecursiveAggregation { + verification_key: inputs[0].clone(), + proof: inputs[1].clone(), + public_inputs: inputs[2].clone(), + key_hash: inputs[3][0], + }, }; self.push_opcode(AcirOpcode::BlackBoxFuncCall(black_box_func_call)); @@ -372,7 +377,7 @@ impl GeneratedAcir { /// If `expr` is not zero, then the constraint system will /// fail upon verification. pub(crate) fn assert_is_zero(&mut self, expr: Expression) { - self.push_opcode(AcirOpcode::Arithmetic(expr)); + self.push_opcode(AcirOpcode::AssertZero(expr)); } /// Returns a `Witness` that is constrained to be: @@ -552,7 +557,7 @@ impl GeneratedAcir { // Constrain the network output to out_expr for (b, o) in b.iter().zip(out_expr) { - self.push_opcode(AcirOpcode::Arithmetic(b - o)); + self.push_opcode(AcirOpcode::AssertZero(b - o)); } Ok(()) } @@ -573,9 +578,11 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s + | BlackBoxFunc::Blake3 | BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash - | BlackBoxFunc::HashToField128Security => None, + | BlackBoxFunc::PedersenHash => None, + + BlackBoxFunc::Keccakf1600 => Some(25), // Can only apply a range constraint to one // witness at a time. 
@@ -591,6 +598,10 @@ fn black_box_func_expected_input_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::FixedBaseScalarMul => Some(2), // Recursive aggregation has a variable number of inputs BlackBoxFunc::RecursiveAggregation => None, + // Addition over the embedded curve: input are coordinates (x1,y1) and (x2,y2) of the Grumpkin points + BlackBoxFunc::EmbeddedCurveAdd => Some(4), + // Doubling over the embedded curve: input is (x,y) coordinate of the point. + BlackBoxFunc::EmbeddedCurveDouble => Some(2), } } @@ -602,9 +613,11 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { // or the operation. BlackBoxFunc::AND | BlackBoxFunc::XOR => Some(1), // 32 byte hash algorithms - BlackBoxFunc::Keccak256 | BlackBoxFunc::SHA256 | BlackBoxFunc::Blake2s => Some(32), - // Hash to field returns a field element - BlackBoxFunc::HashToField128Security => Some(1), + BlackBoxFunc::Keccak256 + | BlackBoxFunc::SHA256 + | BlackBoxFunc::Blake2s + | BlackBoxFunc::Blake3 => Some(32), + BlackBoxFunc::Keccakf1600 => Some(25), // Pedersen commitment returns a point BlackBoxFunc::PedersenCommitment => Some(2), // Pedersen hash returns a field @@ -616,9 +629,11 @@ fn black_box_expected_output_size(name: BlackBoxFunc) -> Option { BlackBoxFunc::SchnorrVerify | BlackBoxFunc::EcdsaSecp256k1 | BlackBoxFunc::EcdsaSecp256r1 => Some(1), - // Output of fixed based scalar mul over the embedded curve + // Output of operations over the embedded curve // will be 2 field elements representing the point. 
- BlackBoxFunc::FixedBaseScalarMul => Some(2), + BlackBoxFunc::FixedBaseScalarMul + | BlackBoxFunc::EmbeddedCurveAdd + | BlackBoxFunc::EmbeddedCurveDouble => Some(2), // Recursive aggregation has a variable number of outputs BlackBoxFunc::RecursiveAggregation => None, } diff --git a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs index 751cd2c8d81..eca25905583 100644 --- a/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/acir_gen/mod.rs @@ -88,10 +88,6 @@ struct Context { /// a new BlockId max_block_id: u32, - /// Maps SSA array values to their slice size and any nested slices internal to the parent slice. - /// This enables us to maintain the slice structure of a slice when performing an array get. - slice_sizes: HashMap, Vec>, - data_bus: DataBus, } @@ -156,6 +152,7 @@ impl AcirValue { } impl Ssa { + #[tracing::instrument(level = "trace", skip_all)] pub(crate) fn into_acir( self, brillig: Brillig, @@ -201,7 +198,6 @@ impl Context { internal_memory_blocks: HashMap::default(), internal_mem_block_lengths: HashMap::default(), max_block_id: 0, - slice_sizes: HashMap::default(), data_bus: DataBus::default(), } } @@ -265,11 +261,14 @@ impl Context { let code = self.gen_brillig_for(main_func, &brillig)?; + // We specifically do not attempt execution of the brillig code being generated as this can result in it being + // replaced with constraints on witnesses to the program outputs. 
let output_values = self.acir_context.brillig( self.current_side_effects_enabled_var, code, inputs, outputs, + false, )?; let output_vars: Vec<_> = output_values .iter() @@ -290,7 +289,7 @@ impl Context { dfg: &DataFlowGraph, ) -> Result, RuntimeError> { // The first witness (if any) is the next one - let start_witness = self.acir_context.current_witness_index().0 + 1; + let start_witness = self.acir_context.current_witness_index().0; for param_id in params { let typ = dfg.type_of_value(*param_id); let value = self.convert_ssa_block_param(&typ)?; @@ -301,7 +300,7 @@ impl Context { let len = if matches!(typ, Type::Array(_, _)) { typ.flattened_size() } else { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Block params should be an array".to_owned(), found: format!("Instead got {:?}", typ), call_stack: self.acir_context.get_call_stack(), @@ -411,66 +410,14 @@ impl Context { self.define_result_var(dfg, instruction_id, result_acir_var); } Instruction::Constrain(lhs, rhs, assert_message) => { - let lhs = self.convert_value(*lhs, dfg); - let rhs = self.convert_value(*rhs, dfg); - - fn get_var_equality_assertions( - lhs: AcirValue, - rhs: AcirValue, - read_from_index: &mut impl FnMut(BlockId, usize) -> Result, - ) -> Result, InternalError> { - match (lhs, rhs) { - (AcirValue::Var(lhs, _), AcirValue::Var(rhs, _)) => Ok(vec![(lhs, rhs)]), - (AcirValue::Array(lhs_values), AcirValue::Array(rhs_values)) => { - let var_equality_assertions = lhs_values - .into_iter() - .zip(rhs_values) - .map(|(lhs, rhs)| { - get_var_equality_assertions(lhs, rhs, read_from_index) - }) - .collect::, _>>()? - .into_iter() - .flatten() - .collect(); - Ok(var_equality_assertions) - } - ( - AcirValue::DynamicArray(AcirDynamicArray { - block_id: lhs_block_id, - len, - .. - }), - AcirValue::DynamicArray(AcirDynamicArray { - block_id: rhs_block_id, - .. 
- }), - ) => try_vecmap(0..len, |i| { - let lhs_var = read_from_index(lhs_block_id, i)?; - let rhs_var = read_from_index(rhs_block_id, i)?; - Ok((lhs_var, rhs_var)) - }), - _ => { - unreachable!("ICE: lhs and rhs should be of the same type") - } - } - } - - let mut read_dynamic_array_index = - |block_id: BlockId, array_index: usize| -> Result { - let index_var = self.acir_context.add_constant(array_index); - - self.acir_context.read_from_memory(block_id, &index_var) - }; + let lhs = self.convert_numeric_value(*lhs, dfg)?; + let rhs = self.convert_numeric_value(*rhs, dfg)?; - for (lhs, rhs) in - get_var_equality_assertions(lhs, rhs, &mut read_dynamic_array_index)? - { - self.acir_context.assert_eq_var(lhs, rhs, assert_message.clone())?; - } + self.acir_context.assert_eq_var(lhs, rhs, assert_message.clone())?; } - Instruction::Cast(value_id, typ) => { - let result_acir_var = self.convert_ssa_cast(value_id, typ, dfg)?; - self.define_result_var(dfg, instruction_id, result_acir_var); + Instruction::Cast(value_id, _) => { + let acir_var = self.convert_numeric_value(*value_id, dfg)?; + self.define_result_var(dfg, instruction_id, acir_var); } Instruction::Call { func, arguments } => { let result_ids = dfg.instruction_results(instruction_id); @@ -488,7 +435,7 @@ impl Context { let outputs: Vec = vecmap(result_ids, |result_id| dfg.type_of_value(*result_id).into()); - let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs)?; + let output_values = self.acir_context.brillig(self.current_side_effects_enabled_var, code, inputs, outputs, true)?; // Compiler sanity check assert_eq!(result_ids.len(), output_values.len(), "ICE: The number of Brillig output values should match the result ids in SSA"); @@ -639,7 +586,7 @@ impl Context { Instruction::ArrayGet { array, index } => (array, index, None), Instruction::ArraySet { array, index, value, .. 
} => (array, index, Some(value)), _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArrayGet or ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -679,24 +626,18 @@ impl Context { ) -> Result { let index_const = dfg.get_numeric_constant(index); let value_type = dfg.type_of_value(array); - let (Type::Array(element_types, _) | Type::Slice(element_types)) = &value_type else { + // Compiler sanity checks + assert!( + !value_type.is_nested_slice(), + "ICE: Nested slice type has reached ACIR generation" + ); + let (Type::Array(_, _) | Type::Slice(_)) = &value_type else { unreachable!("ICE: expected array or slice type"); - }; - // TODO(#3188): Need to be able to handle constant index for slices to seriously reduce - // constraint sizes of nested slices - // This can only be done if we accurately flatten nested slices as otherwise we will reach - // index out of bounds errors. If the slice is already flat then we can treat them similarly to arrays. 
- if matches!(value_type, Type::Slice(_)) - && element_types.iter().any(|element| element.contains_slice_element()) - { - return Ok(false); - } - match self.convert_value(array, dfg) { AcirValue::Var(acir_var, _) => { - return Err(RuntimeError::InternalError(InternalError::UnExpected { + return Err(RuntimeError::InternalError(InternalError::Unexpected { expected: "an array value".to_string(), found: format!("{acir_var:?}"), call_stack: self.acir_context.get_call_stack(), @@ -784,24 +725,8 @@ impl Context { let mut dummy_predicate_index = predicate_index; // We must setup the dummy value to match the type of the value we wish to store - let slice_sizes = if store_type.contains_slice_element() { - self.compute_slice_sizes(store, None, dfg); - self.slice_sizes.get(&store).cloned().ok_or_else(|| { - InternalError::UnExpected { - expected: "Store value should have slice sizes computed".to_owned(), - found: "Missing key in slice sizes map".to_owned(), - call_stack: self.acir_context.get_call_stack(), - } - })? - } else { - vec![] - }; - let dummy = self.array_get_value( - &store_type, - block_id, - &mut dummy_predicate_index, - &slice_sizes, - )?; + let dummy = + self.array_get_value(&store_type, block_id, &mut dummy_predicate_index)?; Some(self.convert_array_set_store_value(&store_value, &dummy)?) } @@ -918,26 +843,12 @@ impl Context { } } - let value = if !res_typ.contains_slice_element() { - self.array_get_value(&res_typ, block_id, &mut var_index, &[])? - } else { - let slice_sizes = self - .slice_sizes - .get(&array_id) - .expect("ICE: Array with slices should have associated slice sizes"); - - // The first max size is going to be the length of the parent slice - // As we are fetching from the parent slice we just want its internal - // slice sizes. 
- let slice_sizes = slice_sizes[1..].to_vec(); - - let value = self.array_get_value(&res_typ, block_id, &mut var_index, &slice_sizes)?; - - // Insert the resulting slice sizes - self.slice_sizes.insert(results[0], slice_sizes); - - value - }; + // Compiler sanity check + assert!( + !res_typ.contains_slice_element(), + "ICE: Nested slice result found during ACIR generation" + ); + let value = self.array_get_value(&res_typ, block_id, &mut var_index)?; self.define_result(dfg, instruction, value.clone()); @@ -949,7 +860,6 @@ impl Context { ssa_type: &Type, block_id: BlockId, var_index: &mut AcirVar, - slice_sizes: &[usize], ) -> Result { let one = self.acir_context.add_constant(FieldElement::one()); match ssa_type.clone() { @@ -967,33 +877,12 @@ impl Context { let mut values = Vector::new(); for _ in 0..len { for typ in element_types.as_ref() { - values.push_back(self.array_get_value( - typ, - block_id, - var_index, - slice_sizes, - )?); + values.push_back(self.array_get_value(typ, block_id, var_index)?); } } Ok(AcirValue::Array(values)) } - Type::Slice(element_types) => { - // It is not enough to execute this loop and simply pass the size from the parent definition. - // We need the internal sizes of each type in case of a nested slice. - let mut values = Vector::new(); - - let (current_size, new_sizes) = - slice_sizes.split_first().expect("should be able to split"); - - for _ in 0..*current_size { - for typ in element_types.as_ref() { - values - .push_back(self.array_get_value(typ, block_id, var_index, new_sizes)?); - } - } - Ok(AcirValue::Array(values)) - } - _ => unreachable!("ICE - expected an array or slice"), + _ => unreachable!("ICE: Expected an array or numeric but got {ssa_type:?}"), } } @@ -1012,7 +901,7 @@ impl Context { let array = match dfg[instruction] { Instruction::ArraySet { array, .. 
} => array, _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "Instruction should be an ArraySet".to_owned(), found: format!("Instead got {:?}", dfg[instruction]), call_stack: self.acir_context.get_call_stack(), @@ -1055,23 +944,6 @@ impl Context { self.array_set_value(&store_value, result_block_id, &mut var_index)?; - // Set new resulting array to have the same slice sizes as the instruction input - if let Type::Slice(element_types) = &array_typ { - let has_internal_slices = - element_types.as_ref().iter().any(|typ| typ.contains_slice_element()); - if has_internal_slices { - let slice_sizes = self - .slice_sizes - .get(&array_id) - .expect( - "ICE: Expected array with internal slices to have associated slice sizes", - ) - .clone(); - let results = dfg.instruction_results(instruction); - self.slice_sizes.insert(results[0], slice_sizes); - } - } - let element_type_sizes = if !can_omit_element_sizes_array(&array_typ) { Some(self.init_element_type_sizes_array(&array_typ, array_id, None, dfg)?) } else { @@ -1180,8 +1052,6 @@ impl Context { Type::Array(_, _) | Type::Slice(_) => { match &dfg[array_id] { Value::Array { array, .. 
} => { - self.compute_slice_sizes(array_id, None, dfg); - for (i, value) in array.iter().enumerate() { flat_elem_type_sizes.push( self.flattened_slice_size(*value, dfg) + flat_elem_type_sizes[i], @@ -1234,7 +1104,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "AcirValue::DynamicArray or AcirValue::Array" .to_owned(), found: format!("{:?}", array_acir_value), @@ -1245,7 +1115,7 @@ impl Context { } } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or instruction".to_owned(), found: format!("{:?}", &dfg[array_id]), call_stack: self.acir_context.get_call_stack(), @@ -1255,7 +1125,7 @@ impl Context { }; } _ => { - return Err(InternalError::UnExpected { + return Err(InternalError::Unexpected { expected: "array or slice".to_owned(), found: array_typ.to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1281,41 +1151,6 @@ impl Context { Ok(element_type_sizes) } - fn compute_slice_sizes( - &mut self, - current_array_id: ValueId, - parent_array: Option, - dfg: &DataFlowGraph, - ) { - let (array, typ) = match &dfg[current_array_id] { - Value::Array { array, typ } => (array, typ.clone()), - _ => return, - }; - - if !matches!(typ, Type::Slice(_)) { - return; - } - - let element_size = typ.element_size(); - let true_len = array.len() / element_size; - if let Some(parent_array) = parent_array { - let sizes_list = - self.slice_sizes.get_mut(&parent_array).expect("ICE: expected size list"); - sizes_list.push(true_len); - for value in array { - self.compute_slice_sizes(*value, Some(parent_array), dfg); - } - } else { - // This means the current_array_id is the parent array - // The slice sizes should follow the parent array's type structure - // thus we start our sizes list with the parent array size. 
- self.slice_sizes.insert(current_array_id, vec![true_len]); - for value in array { - self.compute_slice_sizes(*value, Some(current_array_id), dfg); - } - } - } - fn copy_dynamic_array( &mut self, source: BlockId, @@ -1520,12 +1355,12 @@ impl Context { ) -> Result { match self.convert_value(value_id, dfg) { AcirValue::Var(acir_var, _) => Ok(acir_var), - AcirValue::Array(array) => Err(InternalError::UnExpected { + AcirValue::Array(array) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: format!("{array:?}"), call_stack: self.acir_context.get_call_stack(), }), - AcirValue::DynamicArray(_) => Err(InternalError::UnExpected { + AcirValue::DynamicArray(_) => Err(InternalError::Unexpected { expected: "a numeric value".to_string(), found: "an array".to_string(), call_stack: self.acir_context.get_call_stack(), @@ -1577,12 +1412,12 @@ impl Context { // Note: that this produces unnecessary constraints when // this Eq instruction is being used for a constrain statement BinaryOp::Eq => self.acir_context.eq_var(lhs, rhs), - BinaryOp::Lt => self.acir_context.less_than_var( - lhs, - rhs, - bit_count, - self.current_side_effects_enabled_var, - ), + BinaryOp::Lt => match binary_type { + AcirType::NumericType(NumericType::Signed { .. }) => { + self.acir_context.less_than_signed(lhs, rhs, bit_count) + } + _ => self.acir_context.less_than_var(lhs, rhs, bit_count), + }, BinaryOp::Xor => self.acir_context.xor_var(lhs, rhs, binary_type), BinaryOp::And => self.acir_context.and_var(lhs, rhs, binary_type), BinaryOp::Or => self.acir_context.or_var(lhs, rhs, binary_type), @@ -1640,41 +1475,6 @@ impl Context { } } - /// Returns an `AcirVar` that is constrained to fit in the target type by truncating the input. - /// If the target cast is to a `NativeField`, no truncation is required so the cast becomes a - /// no-op. 
- fn convert_ssa_cast( - &mut self, - value_id: &ValueId, - typ: &Type, - dfg: &DataFlowGraph, - ) -> Result { - let (variable, incoming_type) = match self.convert_value(*value_id, dfg) { - AcirValue::Var(variable, typ) => (variable, typ), - AcirValue::DynamicArray(_) | AcirValue::Array(_) => { - unreachable!("Cast is only applied to numerics") - } - }; - let target_numeric = match typ { - Type::Numeric(numeric) => numeric, - _ => unreachable!("Can only cast to a numeric"), - }; - match target_numeric { - NumericType::NativeField => { - // Casting into a Field as a no-op - Ok(variable) - } - NumericType::Unsigned { bit_size } | NumericType::Signed { bit_size } => { - let max_bit_size = incoming_type.bit_size(); - if max_bit_size <= *bit_size { - // Incoming variable already fits into target bit size - this is a no-op - return Ok(variable); - } - self.acir_context.truncate_var(variable, *bit_size, max_bit_size) - } - } - } - /// Returns an `AcirVar`that is constrained to be result of the truncation. 
fn convert_ssa_truncate( &mut self, @@ -1751,6 +1551,9 @@ impl Context { Ok(Self::convert_vars_to_values(vars, dfg, result_ids)) } + Intrinsic::ApplyRangeConstraint => { + unreachable!("ICE: `Intrinsic::ApplyRangeConstraint` calls should be transformed into an `Instruction::RangeCheck`"); + } Intrinsic::ToRadix(endian) => { let field = self.convert_value(arguments[0], dfg).into_var()?; let radix = self.convert_value(arguments[1], dfg).into_var()?; @@ -1808,23 +1611,21 @@ impl Context { let slice_length = self.convert_value(arguments[0], dfg).into_var()?; let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(slice_contents, dfg); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); + let slice = self.convert_value(slice_contents, dfg); let mut new_elem_size = Self::flattened_value_size(&slice); let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; let elements_to_push = &arguments[2..]; - // We only fill internal slices for nested slices (a slice inside of a slice). - // So we must directly push back elements for slices which are not a nested slice. - if !slice_typ.is_nested_slice() { - for elem in elements_to_push { - let element = self.convert_value(*elem, dfg); - - new_elem_size += Self::flattened_value_size(&element); - new_slice.push_back(element); - } + // We must directly push back elements for non-nested slices + for elem in elements_to_push { + let element = self.convert_value(*elem, dfg); + + new_elem_size += Self::flattened_value_size(&element); + new_slice.push_back(element); } // Increase the slice length by one to enable accessing more elements in the slice. @@ -1836,20 +1637,6 @@ impl Context { self.initialize_array(result_block_id, new_elem_size, Some(new_slice_val.clone()))?; // The previous slice length represents the index we want to write into. 
let mut var_index = slice_length; - // Dynamic arrays are represented as flat memory. We must flatten the user facing index - // to a flattened index that matches the complex slice structure. - if slice_typ.is_nested_slice() { - let element_size = slice_typ.element_size(); - - // Multiply the element size against the var index before fetching the flattened index - // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, - // which is how `get_flattened_index` expects its index input. - let element_size_var = self.acir_context.add_constant(element_size); - var_index = self.acir_context.mul_var(slice_length, element_size_var)?; - var_index = - self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; - } - // Write the elements we wish to push back directly. // The slice's underlying array value should already be filled with dummy data // to enable this write to be within bounds. @@ -1883,8 +1670,9 @@ impl Context { let (slice_contents, slice_typ, _) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice: AcirValue = self.convert_value(slice_contents, dfg); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); + let slice: AcirValue = self.convert_value(slice_contents, dfg); let mut new_slice_size = Self::flattened_value_size(&slice); // Increase the slice length by one to enable accessing more elements in the slice. @@ -1896,33 +1684,14 @@ impl Context { let elements_to_push = &arguments[2..]; let mut elem_size = 0; - // We only fill internal slices for nested slices (a slice inside of a slice). - // So we must directly push front elements for slices which are not a nested slice. 
- if !slice_typ.is_nested_slice() { - for elem in elements_to_push.iter().rev() { - let element = self.convert_value(*elem, dfg); - - elem_size += Self::flattened_value_size(&element); - new_slice.push_front(element); - } - new_slice_size += elem_size; - } else { - // We have already filled the appropriate dummy values for nested slice during SSA gen. - // We need to account for that we do not go out of bounds by removing dummy data as we - // push elements to the front of our slice. - // Using this strategy we are able to avoid dynamic writes like we do for a SlicePushBack. - for elem in elements_to_push.iter().rev() { - let element = self.convert_value(*elem, dfg); - - let elem_size = Self::flattened_value_size(&element); - // Have to pop based off of the flattened value size as we read the - // slice intrinsic as a flat list of AcirValue::Var - for _ in 0..elem_size { - new_slice.pop_back(); - } - new_slice.push_front(element); - } + // We must directly push front elements for non-nested slices + for elem in elements_to_push.iter().rev() { + let element = self.convert_value(*elem, dfg); + + elem_size += Self::flattened_value_size(&element); + new_slice.push_front(element); } + new_slice_size += elem_size; let new_slice_val = AcirValue::Array(new_slice.clone()); @@ -1964,55 +1733,16 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(slice_contents, dfg); - - let element_size = slice_typ.element_size(); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let mut popped_elements = Vec::new(); - // Fetch the values we are popping off of the slice. - // In the case of non-nested slice the logic is simple as we do not - // need to account for the internal slice sizes or flattening the index. - // - // The pop back operation results are of the format [slice length, slice contents, popped elements]. 
- // Thus, we look at the result ids at index 2 and onwards to determine the type of each popped element. - if !slice_typ.is_nested_slice() { - for res in &result_ids[2..] { - let elem = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &[], - )?; - popped_elements.push(elem); - } - } else { - // Fetch the slice sizes of the nested slice. - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - // Multiply the element size against the var index before fetching the flattened index - // This operation makes sure our user-facing slice index matches the strategy for indexing in SSA, - // which is how `get_flattened_index` expects its index input. - let element_size_var = self.acir_context.add_constant(element_size); - // We want to use an index one less than the slice length - var_index = self.acir_context.mul_var(var_index, element_size_var)?; - var_index = - self.get_flattened_index(&slice_typ, slice_contents, var_index, dfg)?; - - for res in &result_ids[2..] { - let elem = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &slice_sizes, - )?; - popped_elements.push(elem); - } + for res in &result_ids[2..] 
{ + let elem = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut var_index)?; + popped_elements.push(elem); } + let slice = self.convert_value(slice_contents, dfg); let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; @@ -2030,11 +1760,13 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; - let slice = self.convert_value(slice_contents, dfg); + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let one = self.acir_context.add_constant(FieldElement::one()); let new_slice_length = self.acir_context.sub_var(slice_length, one)?; + let slice = self.convert_value(slice_contents, dfg); + let mut new_slice = Vector::new(); self.slice_intrinsic_input(&mut new_slice, slice)?; @@ -2046,40 +1778,14 @@ impl Context { // Fetch the values we are popping off of the slice. // In the case of non-nested slice the logic is simple as we do not // need to account for the internal slice sizes or flattening the index. - // - // The pop front operation results are of the format [popped elements, slice length, slice contents]. - // Thus, we look at the result ids up to the element size to determine the type of each popped element. 
- if !slice_typ.is_nested_slice() { - for res in &result_ids[..element_size] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &[], - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } - } else { - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - for res in &result_ids[..element_size] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut var_index, - &slice_sizes, - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } + for res in &result_ids[..element_size] { + let element = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut var_index)?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); } + // It is expected that the `popped_elements_size` is the flattened size of the elements, // as the input slice should be a dynamic array which is represented by flat memory. 
new_slice = new_slice.slice(popped_elements_size..); @@ -2095,6 +1801,7 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); let insert_index = self.convert_value(arguments[2], dfg).into_var()?; @@ -2143,19 +1850,11 @@ impl Context { let current_index = self.acir_context.add_constant(i); // Check that we are above the lower bound of the insertion index - let greater_eq_than_idx = self.acir_context.more_than_eq_var( - current_index, - flat_user_index, - 64, - self.current_side_effects_enabled_var, - )?; + let greater_eq_than_idx = + self.acir_context.more_than_eq_var(current_index, flat_user_index, 64)?; // Check that we are below the upper bound of the insertion index - let less_than_idx = self.acir_context.less_than_var( - current_index, - max_flat_user_index, - 64, - self.current_side_effects_enabled_var, - )?; + let less_than_idx = + self.acir_context.less_than_var(current_index, max_flat_user_index, 64)?; // Read from the original slice the value we want to insert into our new slice. // We need to make sure that we read the previous element when our current index is greater than insertion index. 
@@ -2208,7 +1907,6 @@ impl Context { } } - // let new_slice_val = AcirValue::Array(new_slice); let element_type_sizes = if !can_omit_element_sizes_array(&slice_typ) { Some(self.init_element_type_sizes_array( &slice_typ, @@ -2233,6 +1931,7 @@ impl Context { let (slice_contents, slice_typ, block_id) = self.check_array_is_initialized(arguments[1], dfg)?; + assert!(!slice_typ.is_nested_slice(), "ICE: Nested slice used in ACIR generation"); let slice = self.convert_value(slice_contents, dfg); let remove_index = self.convert_value(arguments[2], dfg).into_var()?; @@ -2261,8 +1960,6 @@ impl Context { self.get_flattened_index(&slice_typ, slice_contents, flat_remove_index, dfg)?; // Fetch the values we are remove from the slice. - // In the case of non-nested slice the logic is simple as we do not - // need to account for the internal slice sizes or flattening the index. // As we fetch the values we can determine the size of the removed values // which we will later use for writing the correct resulting slice. let mut popped_elements = Vec::new(); @@ -2270,36 +1967,12 @@ impl Context { // Set a temp index just for fetching from the original slice as `array_get_value` mutates // the index internally. 
let mut temp_index = flat_user_index; - if !slice_typ.is_nested_slice() { - for res in &result_ids[2..(2 + element_size)] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut temp_index, - &[], - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } - } else { - let slice_sizes = self.slice_sizes.get(&slice_contents); - let mut slice_sizes = - slice_sizes.expect("ICE: should have slice sizes").clone(); - // We want to remove the parent size as we are fetching the child - slice_sizes.remove(0); - - for res in &result_ids[2..(2 + element_size)] { - let element = self.array_get_value( - &dfg.type_of_value(*res), - block_id, - &mut temp_index, - &slice_sizes, - )?; - let elem_size = Self::flattened_value_size(&element); - popped_elements_size += elem_size; - popped_elements.push(element); - } + for res in &result_ids[2..(2 + element_size)] { + let element = + self.array_get_value(&dfg.type_of_value(*res), block_id, &mut temp_index)?; + let elem_size = Self::flattened_value_size(&element); + popped_elements_size += elem_size; + popped_elements.push(element); } // Go through the entire slice argument and determine what value should be written to the new slice. @@ -2330,7 +2003,6 @@ impl Context { current_index, flat_user_index, 64, - self.current_side_effects_enabled_var, )?; let shifted_value_pred = @@ -2476,8 +2148,7 @@ impl Context { } } -// We can omit the element size array for arrays which have elements of size 1 and do not contain slices. -// TODO: remove restriction on size 1 elements. +// We can omit the element size array for arrays which don't contain arrays or slices. 
fn can_omit_element_sizes_array(array_typ: &Type) -> bool { if array_typ.contains_slice_element() { return false; @@ -2486,5 +2157,5 @@ fn can_omit_element_sizes_array(array_typ: &Type) -> bool { panic!("ICE: expected array type"); }; - types.len() == 1 && types[0].flattened_size() == 1 + !types.iter().any(|typ| typ.contains_an_array()) } diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs index 7f337089321..cbaeb2477d6 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/data_bus.rs @@ -77,14 +77,13 @@ impl DataBus { impl FunctionBuilder { /// Insert a value into a data bus builder fn add_to_data_bus(&mut self, value: ValueId, databus: &mut DataBusBuilder) { - assert!(databus.databus.is_none(), "initialising finalized call data"); + assert!(databus.databus.is_none(), "initializing finalized call data"); let typ = self.current_function.dfg[value].get_type().clone(); match typ { Type::Numeric(_) => { databus.values.push_back(value); databus.index += 1; } - Type::Reference(_) => unreachable!(), Type::Array(typ, len) => { assert!(typ.len() == 1, "unsupported composite type"); databus.map.insert(value, databus.index); @@ -98,8 +97,11 @@ impl FunctionBuilder { self.add_to_data_bus(element, databus); } } - Type::Slice(_) => unreachable!(), - Type::Function => unreachable!(), + Type::Reference(_) => { + unreachable!("Attempted to add invalid type (reference) to databus") + } + Type::Slice(_) => unreachable!("Attempted to add invalid type (slice) to databus"), + Type::Function => unreachable!("Attempted to add invalid type (function) to databus"), } } @@ -128,7 +130,7 @@ impl FunctionBuilder { /// Generate the data bus for call-data, based on the parameters of the entry block /// and a boolean vector telling which ones are call-data pub(crate) fn call_data_bus(&mut self, is_params_databus: Vec) -> 
DataBusBuilder { - //filter parameters of the first block that have call-data visilibity + //filter parameters of the first block that have call-data visibility let first_block = self.current_function.entry_block(); let params = self.current_function.dfg[first_block].parameters(); let mut databus_param = Vec::new(); diff --git a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs index 56a22fd4107..44be423be10 100644 --- a/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/function_builder/mod.rs @@ -255,6 +255,19 @@ impl FunctionBuilder { self.insert_instruction(Instruction::Constrain(lhs, rhs, assert_message), None); } + /// Insert a [`Instruction::RangeCheck`] instruction at the end of the current block. + pub(crate) fn insert_range_check( + &mut self, + value: ValueId, + max_bit_size: u32, + assert_message: Option, + ) { + self.insert_instruction( + Instruction::RangeCheck { value, max_bit_size, assert_message }, + None, + ); + } + /// Insert a call instruction at the end of the current block and return /// the results of the call. 
pub(crate) fn insert_call( @@ -266,15 +279,6 @@ impl FunctionBuilder { self.insert_instruction(Instruction::Call { func, arguments }, Some(result_types)).results() } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs - pub(crate) fn insert_shift_left(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { - let base = self.field_constant(FieldElement::from(2_u128)); - let pow = self.pow(base, rhs); - let typ = self.current_function.dfg.type_of_value(lhs); - let pow = self.insert_cast(pow, typ); - self.insert_binary(lhs, BinaryOp::Mul, pow) - } - /// Insert ssa instructions which computes lhs << rhs by doing lhs*2^rhs /// and truncate the result to bit_size pub(crate) fn insert_wrapping_shift_left( @@ -289,8 +293,9 @@ impl FunctionBuilder { if let Some(rhs_constant) = self.current_function.dfg.get_numeric_constant(rhs) { // Happy case is that we know precisely by how many bits the the integer will // increase: lhs_bit_size + rhs - let (rhs_bit_size_pow_2, overflows) = - 2_u128.overflowing_pow(rhs_constant.to_u128() as u32); + let bit_shift_size = rhs_constant.to_u128() as u32; + + let (rhs_bit_size_pow_2, overflows) = 2_u128.overflowing_pow(bit_shift_size); if overflows { assert!(bit_size < 128, "ICE - shift left with big integers are not supported"); if bit_size < 128 { @@ -299,17 +304,19 @@ impl FunctionBuilder { } } let pow = self.numeric_constant(FieldElement::from(rhs_bit_size_pow_2), typ); - (bit_size + (rhs_constant.to_u128() as u32), pow) + + let max_lhs_bits = self.current_function.dfg.get_value_max_num_bits(lhs); + + (max_lhs_bits + bit_shift_size, pow) } else { // we use a predicate to nullify the result in case of overflow let bit_size_var = self.numeric_constant(FieldElement::from(bit_size as u128), typ.clone()); let overflow = self.insert_binary(rhs, BinaryOp::Lt, bit_size_var); - let one = self.numeric_constant(FieldElement::one(), Type::unsigned(1)); - let predicate = self.insert_binary(overflow, BinaryOp::Eq, one); - let 
predicate = self.insert_cast(predicate, typ.clone()); - - let pow = self.pow(base, rhs); + let predicate = self.insert_cast(overflow, typ.clone()); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); let pow = self.insert_cast(pow, typ); (FieldElement::max_num_bits(), self.insert_binary(predicate, BinaryOp::Mul, pow)) }; @@ -323,9 +330,16 @@ impl FunctionBuilder { } /// Insert ssa instructions which computes lhs >> rhs by doing lhs/2^rhs - pub(crate) fn insert_shift_right(&mut self, lhs: ValueId, rhs: ValueId) -> ValueId { + pub(crate) fn insert_shift_right( + &mut self, + lhs: ValueId, + rhs: ValueId, + bit_size: u32, + ) -> ValueId { let base = self.field_constant(FieldElement::from(2_u128)); - let pow = self.pow(base, rhs); + // we can safely cast to unsigned because overflow_checks prevent bit-shift with a negative value + let rhs_unsigned = self.insert_cast(rhs, Type::unsigned(bit_size)); + let pow = self.pow(base, rhs_unsigned); self.insert_binary(lhs, BinaryOp::Div, pow) } @@ -353,10 +367,12 @@ impl FunctionBuilder { let r_squared = self.insert_binary(r, BinaryOp::Mul, r); let a = self.insert_binary(r_squared, BinaryOp::Mul, lhs); let idx = self.field_constant(FieldElement::from((bit_size - i) as i128)); - let b = self.insert_array_get(rhs_bits, idx, Type::field()); + let b = self.insert_array_get(rhs_bits, idx, Type::bool()); + let not_b = self.insert_not(b); + let b = self.insert_cast(b, Type::field()); + let not_b = self.insert_cast(not_b, Type::field()); let r1 = self.insert_binary(a, BinaryOp::Mul, b); - let c = self.insert_binary(one, BinaryOp::Sub, b); - let r2 = self.insert_binary(c, BinaryOp::Mul, r_squared); + let r2 = self.insert_binary(r_squared, BinaryOp::Mul, not_b); r = self.insert_binary(r1, BinaryOp::Add, r2); } r diff --git a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs 
b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs index abddbfb74c7..870b5e602f1 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/dfg.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/dfg.rs @@ -47,7 +47,7 @@ pub(crate) struct DataFlowGraph { constants: HashMap<(FieldElement, Type), ValueId>, /// Contains each function that has been imported into the current function. - /// Each function's Value::Function is uniqued here so any given FunctionId + /// A unique `ValueId` for each function's [`Value::Function`] is stored so any given FunctionId /// will always have the same ValueId within this function. functions: HashMap, @@ -57,7 +57,7 @@ pub(crate) struct DataFlowGraph { intrinsics: HashMap, /// Contains each foreign function that has been imported into the current function. - /// This map is used to ensure that the ValueId for any given foreign functôn is always + /// This map is used to ensure that the ValueId for any given foreign function is always /// represented by only 1 ValueId within this function. 
foreign_functions: HashMap, @@ -160,17 +160,37 @@ impl DataFlowGraph { call_stack: CallStack, ) -> InsertInstructionResult { use InsertInstructionResult::*; - match instruction.simplify(self, block, ctrl_typevars.clone()) { + match instruction.simplify(self, block, ctrl_typevars.clone(), &call_stack) { SimplifyResult::SimplifiedTo(simplification) => SimplifiedTo(simplification), SimplifyResult::SimplifiedToMultiple(simplification) => { SimplifiedToMultiple(simplification) } SimplifyResult::Remove => InstructionRemoved, - result @ (SimplifyResult::SimplifiedToInstruction(_) | SimplifyResult::None) => { - let instruction = result.instruction().unwrap_or(instruction); - let id = self.make_instruction(instruction, ctrl_typevars); - self.blocks[block].insert_instruction(id); - self.locations.insert(id, call_stack); + result @ (SimplifyResult::SimplifiedToInstruction(_) + | SimplifyResult::SimplifiedToInstructionMultiple(_) + | SimplifyResult::None) => { + let instructions = result.instructions().unwrap_or(vec![instruction]); + + if instructions.len() > 1 { + // There's currently no way to pass results from one instruction in `instructions` on to the next. + // We then restrict this to only support multiple instructions if they're all `Instruction::Constrain` + // as this instruction type does not have any results. 
+ assert!( + instructions.iter().all(|instruction| matches!(instruction, Instruction::Constrain(..))), + "`SimplifyResult::SimplifiedToInstructionMultiple` only supports `Constrain` instructions" + ); + } + + let mut last_id = None; + + for instruction in instructions { + let id = self.make_instruction(instruction, ctrl_typevars.clone()); + self.blocks[block].insert_instruction(id); + self.locations.insert(id, call_stack.clone()); + last_id = Some(id); + } + + let id = last_id.expect("There should be at least 1 simplified instruction"); InsertInstructionResult::Results(id, self.instruction_results(id)) } } @@ -317,6 +337,25 @@ impl DataFlowGraph { self.values[value].get_type().clone() } + /// Returns the maximum possible number of bits that `value` can potentially be. + /// + /// Should `value` be a numeric constant then this function will return the exact number of bits required, + /// otherwise it will return the minimum number of bits based on type information. + pub(crate) fn get_value_max_num_bits(&self, value: ValueId) -> u32 { + match self[value] { + Value::Instruction { instruction, .. } => { + if let Instruction::Cast(original_value, _) = self[instruction] { + self.type_of_value(original_value).bit_size() + } else { + self.type_of_value(value).bit_size() + } + } + + Value::NumericConstant { constant, .. } => constant.num_bits(), + _ => self.type_of_value(value).bit_size(), + } + } + /// True if the type of this value is Type::Reference. /// Using this method over type_of_value avoids cloning the value's type. 
pub(crate) fn value_is_reference(&self, value: ValueId) -> bool { diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs index 628ad638e64..457fe41de93 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction.rs @@ -40,6 +40,7 @@ pub(crate) enum Intrinsic { SlicePopFront, SliceInsert, SliceRemove, + ApplyRangeConstraint, StrAsBytes, ToBits(Endian), ToRadix(Endian), @@ -61,6 +62,7 @@ impl std::fmt::Display for Intrinsic { Intrinsic::SliceInsert => write!(f, "slice_insert"), Intrinsic::SliceRemove => write!(f, "slice_remove"), Intrinsic::StrAsBytes => write!(f, "str_as_bytes"), + Intrinsic::ApplyRangeConstraint => write!(f, "apply_range_constraint"), Intrinsic::ToBits(Endian::Big) => write!(f, "to_be_bits"), Intrinsic::ToBits(Endian::Little) => write!(f, "to_le_bits"), Intrinsic::ToRadix(Endian::Big) => write!(f, "to_be_radix"), @@ -78,7 +80,7 @@ impl Intrinsic { /// If there are no side effects then the `Intrinsic` can be removed if the result is unused. pub(crate) fn has_side_effects(&self) -> bool { match self { - Intrinsic::AssertConstant => true, + Intrinsic::AssertConstant | Intrinsic::ApplyRangeConstraint => true, Intrinsic::Sort | Intrinsic::ArrayLen @@ -106,6 +108,7 @@ impl Intrinsic { "arraysort" => Some(Intrinsic::Sort), "array_len" => Some(Intrinsic::ArrayLen), "assert_constant" => Some(Intrinsic::AssertConstant), + "apply_range_constraint" => Some(Intrinsic::ApplyRangeConstraint), "slice_push_back" => Some(Intrinsic::SlicePushBack), "slice_push_front" => Some(Intrinsic::SlicePushFront), "slice_pop_back" => Some(Intrinsic::SlicePopBack), @@ -236,10 +239,7 @@ impl Instruction { // In ACIR, a division with a false predicate outputs (0,0), so it cannot replace another instruction unless they have the same predicate bin.operator != BinaryOp::Div } - Cast(_, _) | Not(_) | ArrayGet { .. } | ArraySet { .. 
} => true, - - // Unclear why this instruction causes problems. - Truncate { .. } => false, + Cast(_, _) | Truncate { .. } | Not(_) | ArrayGet { .. } | ArraySet { .. } => true, // These either have side-effects or interact with memory Constrain(..) @@ -408,6 +408,7 @@ impl Instruction { dfg: &mut DataFlowGraph, block: BasicBlockId, ctrl_typevars: Option>, + call_stack: &CallStack, ) -> SimplifyResult { use SimplifyResult::*; match self { @@ -434,73 +435,11 @@ impl Instruction { } } Instruction::Constrain(lhs, rhs, msg) => { - if dfg.resolve(*lhs) == dfg.resolve(*rhs) { - // Remove trivial case `assert_eq(x, x)` - SimplifyResult::Remove + let constraints = decompose_constrain(*lhs, *rhs, msg.clone(), dfg); + if constraints.is_empty() { + Remove } else { - match (&dfg[dfg.resolve(*lhs)], &dfg[dfg.resolve(*rhs)]) { - ( - Value::NumericConstant { constant, typ }, - Value::Instruction { instruction, .. }, - ) - | ( - Value::Instruction { instruction, .. }, - Value::NumericConstant { constant, typ }, - ) if *typ == Type::bool() => { - match dfg[*instruction] { - Instruction::Binary(Binary { - lhs, - rhs, - operator: BinaryOp::Eq, - }) if constant.is_one() => { - // Replace an explicit two step equality assertion - // - // v2 = eq v0, u32 v1 - // constrain v2 == u1 1 - // - // with a direct assertion of equality between the two values - // - // v2 = eq v0, u32 v1 - // constrain v0 == v1 - // - // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it - // will likely be removed through dead instruction elimination. 
- - SimplifiedToInstruction(Instruction::Constrain( - lhs, - rhs, - msg.clone(), - )) - } - Instruction::Not(value) => { - // Replace an assertion that a not instruction is truthy - // - // v1 = not v0 - // constrain v1 == u1 1 - // - // with an assertion that the not instruction input is falsy - // - // v1 = not v0 - // constrain v0 == u1 0 - // - // Note that this doesn't remove the value `v1` as it may be used in other instructions, but it - // will likely be removed through dead instruction elimination. - let reversed_constant = FieldElement::from(!constant.is_one()); - let reversed_constant = - dfg.make_constant(reversed_constant, Type::bool()); - SimplifiedToInstruction(Instruction::Constrain( - value, - reversed_constant, - msg.clone(), - )) - } - - _ => None, - } - } - - _ => None, - } + SimplifiedToInstructionMultiple(constraints) } } Instruction::ArrayGet { array, index } => { @@ -535,23 +474,50 @@ impl Instruction { let truncated = numeric_constant.to_u128() % integer_modulus; SimplifiedTo(dfg.make_constant(truncated.into(), typ)) } else if let Value::Instruction { instruction, .. } = &dfg[dfg.resolve(*value)] { - if let Instruction::Truncate { bit_size: src_bit_size, .. } = &dfg[*instruction] - { - // If we're truncating the value to fit into the same or larger bit size then this is a noop. - if src_bit_size <= bit_size && src_bit_size <= max_bit_size { - SimplifiedTo(*value) - } else { - None + match &dfg[*instruction] { + Instruction::Truncate { bit_size: src_bit_size, .. } => { + // If we're truncating the value to fit into the same or larger bit size then this is a noop. + if src_bit_size <= bit_size && src_bit_size <= max_bit_size { + SimplifiedTo(*value) + } else { + None + } } - } else { - None + + Instruction::Binary(Binary { + lhs, rhs, operator: BinaryOp::Div, .. 
+ }) if dfg.is_constant(*rhs) => { + // If we're truncating the result of a division by a constant denominator, we can + // reason about the maximum bit size of the result and whether a truncation is necessary. + + let numerator_type = dfg.type_of_value(*lhs); + let max_numerator_bits = numerator_type.bit_size(); + + let divisor = dfg + .get_numeric_constant(*rhs) + .expect("rhs is checked to be constant."); + let divisor_bits = divisor.num_bits(); + + // 2^{max_quotient_bits} = 2^{max_numerator_bits} / 2^{divisor_bits} + // => max_quotient_bits = max_numerator_bits - divisor_bits + // + // In order for the truncation to be a noop, we then require `max_quotient_bits < bit_size`. + let max_quotient_bits = max_numerator_bits - divisor_bits; + if max_quotient_bits < *bit_size { + SimplifiedTo(*value) + } else { + None + } + } + + _ => None, } } else { None } } Instruction::Call { func, arguments } => { - simplify_call(*func, arguments, dfg, block, ctrl_typevars) + simplify_call(*func, arguments, dfg, block, ctrl_typevars, call_stack) } Instruction::EnableSideEffects { condition } => { if let Some(last) = dfg[block].instructions().last().copied() { @@ -583,6 +549,14 @@ impl Instruction { /// that value is returned. Otherwise None is returned. fn simplify_cast(value: ValueId, dst_typ: &Type, dfg: &mut DataFlowGraph) -> SimplifyResult { use SimplifyResult::*; + let value = dfg.resolve(value); + + if let Value::Instruction { instruction, .. } = &dfg[value] { + if let Instruction::Cast(original_value, _) = &dfg[*instruction] { + return SimplifiedToInstruction(Instruction::Cast(*original_value, dst_typ.clone())); + } + } + if let Some(constant) = dfg.get_numeric_constant(value) { let src_typ = dfg.type_of_value(value); match (src_typ, dst_typ) { @@ -621,6 +595,129 @@ fn simplify_cast(value: ValueId, dst_typ: &Type, dfg: &mut DataFlowGraph) -> Sim } } +/// Try to decompose this constrain instruction. 
This constraint will be broken down such that it instead constrains +/// all the values which are used to compute the values which were being constrained. +fn decompose_constrain( + lhs: ValueId, + rhs: ValueId, + msg: Option, + dfg: &mut DataFlowGraph, +) -> Vec { + let lhs = dfg.resolve(lhs); + let rhs = dfg.resolve(rhs); + + if lhs == rhs { + // Remove trivial case `assert_eq(x, x)` + Vec::new() + } else { + match (&dfg[lhs], &dfg[rhs]) { + (Value::NumericConstant { constant, typ }, Value::Instruction { instruction, .. }) + | (Value::Instruction { instruction, .. }, Value::NumericConstant { constant, typ }) + if *typ == Type::bool() => + { + match dfg[*instruction] { + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Eq }) + if constant.is_one() => + { + // Replace an explicit two step equality assertion + // + // v2 = eq v0, u32 v1 + // constrain v2 == u1 1 + // + // with a direct assertion of equality between the two values + // + // v2 = eq v0, u32 v1 + // constrain v0 == v1 + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + + vec![Instruction::Constrain(lhs, rhs, msg)] + } + + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Mul }) + if constant.is_one() && dfg.type_of_value(lhs) == Type::bool() => + { + // Replace an equality assertion on a boolean multiplication + // + // v2 = mul v0, v1 + // constrain v2 == u1 1 + // + // with a direct assertion that each value is equal to 1 + // + // v2 = mul v0, v1 + // constrain v0 == 1 + // constrain v1 == 1 + // + // This is due to the fact that for `v2` to be 1 then both `v0` and `v1` are 1. + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. 
+ let one = FieldElement::one(); + let one = dfg.make_constant(one, Type::bool()); + + [ + decompose_constrain(lhs, one, msg.clone(), dfg), + decompose_constrain(rhs, one, msg, dfg), + ] + .concat() + } + + Instruction::Binary(Binary { lhs, rhs, operator: BinaryOp::Or }) + if constant.is_zero() => + { + // Replace an equality assertion on an OR + // + // v2 = or v0, v1 + // constrain v2 == u1 0 + // + // with a direct assertion that each value is equal to 0 + // + // v2 = or v0, v1 + // constrain v0 == 0 + // constrain v1 == 0 + // + // This is due to the fact that for `v2` to be 0 then both `v0` and `v1` are 0. + // + // Note that this doesn't remove the value `v2` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. + let zero = FieldElement::zero(); + let zero = dfg.make_constant(zero, dfg.type_of_value(lhs)); + + [ + decompose_constrain(lhs, zero, msg.clone(), dfg), + decompose_constrain(rhs, zero, msg, dfg), + ] + .concat() + } + + Instruction::Not(value) => { + // Replace an assertion that a not instruction is truthy + // + // v1 = not v0 + // constrain v1 == u1 1 + // + // with an assertion that the not instruction input is falsy + // + // v1 = not v0 + // constrain v0 == u1 0 + // + // Note that this doesn't remove the value `v1` as it may be used in other instructions, but it + // will likely be removed through dead instruction elimination. 
+ let reversed_constant = FieldElement::from(!constant.is_one()); + let reversed_constant = dfg.make_constant(reversed_constant, Type::bool()); + decompose_constrain(value, reversed_constant, msg, dfg) + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg)], + } + } + + _ => vec![Instruction::Constrain(lhs, rhs, msg)], + } + } +} + /// The possible return values for Instruction::return_types pub(crate) enum InstructionResultType { /// The result type of this instruction matches that of this operand @@ -1109,6 +1206,10 @@ pub(crate) enum SimplifyResult { /// Replace this function with an simpler but equivalent instruction. SimplifiedToInstruction(Instruction), + /// Replace this function with a set of simpler but equivalent instructions. + /// This is currently only to be used for [`Instruction::Constrain`]. + SimplifiedToInstructionMultiple(Vec), + /// Remove the instruction, it is unnecessary Remove, @@ -1117,9 +1218,10 @@ pub(crate) enum SimplifyResult { } impl SimplifyResult { - pub(crate) fn instruction(self) -> Option { + pub(crate) fn instructions(self) -> Option> { match self { - SimplifyResult::SimplifiedToInstruction(instruction) => Some(instruction), + SimplifyResult::SimplifiedToInstruction(instruction) => Some(vec![instruction]), + SimplifyResult::SimplifiedToInstructionMultiple(instructions) => Some(instructions), _ => None, } } diff --git a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs index 01bbd51f466..d1991abab37 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/instruction/call.rs @@ -31,6 +31,7 @@ pub(super) fn simplify_call( dfg: &mut DataFlowGraph, block: BasicBlockId, ctrl_typevars: Option>, + call_stack: &CallStack, ) -> SimplifyResult { let intrinsic = match &dfg[func] { Value::Intrinsic(intrinsic) => *intrinsic, @@ -222,6 +223,20 @@ pub(super) fn simplify_call( SimplifyResult::None } } + 
Intrinsic::ApplyRangeConstraint => { + let value = arguments[0]; + let max_bit_size = dfg.get_numeric_constant(arguments[1]); + if let Some(max_bit_size) = max_bit_size { + let max_bit_size = max_bit_size.to_u128() as u32; + SimplifyResult::SimplifiedToInstruction(Instruction::RangeCheck { + value, + max_bit_size, + assert_message: Some("call to assert_max_bit_size".to_owned()), + }) + } else { + SimplifyResult::None + } + } Intrinsic::BlackBox(bb_func) => simplify_black_box_func(bb_func, arguments, dfg), Intrinsic::Sort => simplify_sort(dfg, arguments), Intrinsic::AsField => { @@ -232,7 +247,24 @@ pub(super) fn simplify_call( SimplifyResult::SimplifiedToInstruction(instruction) } Intrinsic::FromField => { - let instruction = Instruction::Cast(arguments[0], ctrl_typevars.unwrap().remove(0)); + let incoming_type = Type::field(); + let target_type = ctrl_typevars.unwrap().remove(0); + + let truncate = Instruction::Truncate { + value: arguments[0], + bit_size: target_type.bit_size(), + max_bit_size: incoming_type.bit_size(), + }; + let truncated_value = dfg + .insert_instruction_and_results( + truncate, + block, + Some(vec![incoming_type]), + call_stack.clone(), + ) + .first(); + + let instruction = Instruction::Cast(truncated_value, target_type); SimplifyResult::SimplifiedToInstruction(instruction) } } @@ -364,6 +396,8 @@ fn simplify_black_box_func( match bb_func { BlackBoxFunc::SHA256 => simplify_hash(dfg, arguments, acvm::blackbox_solver::sha256), BlackBoxFunc::Blake2s => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake2s), + BlackBoxFunc::Blake3 => simplify_hash(dfg, arguments, acvm::blackbox_solver::blake3), + BlackBoxFunc::Keccakf1600 => SimplifyResult::None, //TODO(Guillaume) BlackBoxFunc::Keccak256 => { match (dfg.get_array_constant(arguments[0]), dfg.get_numeric_constant(arguments[1])) { (Some((input, _)), Some(num_bytes)) if array_is_constant(dfg, &input) => { @@ -383,18 +417,6 @@ fn simplify_black_box_func( _ => SimplifyResult::None, } } - 
BlackBoxFunc::HashToField128Security => match dfg.get_array_constant(arguments[0]) { - Some((input, _)) if array_is_constant(dfg, &input) => { - let input_bytes: Vec = to_u8_vec(dfg, input); - - let field = acvm::blackbox_solver::hash_to_field_128_security(&input_bytes) - .expect("Rust solvable black box function should not fail"); - - let field_constant = dfg.make_constant(field, Type::field()); - SimplifyResult::SimplifiedTo(field_constant) - } - _ => SimplifyResult::None, - }, BlackBoxFunc::EcdsaSecp256k1 => { simplify_signature(dfg, arguments, acvm::blackbox_solver::ecdsa_secp256k1_verify) @@ -406,7 +428,9 @@ fn simplify_black_box_func( BlackBoxFunc::FixedBaseScalarMul | BlackBoxFunc::SchnorrVerify | BlackBoxFunc::PedersenCommitment - | BlackBoxFunc::PedersenHash => { + | BlackBoxFunc::PedersenHash + | BlackBoxFunc::EmbeddedCurveAdd + | BlackBoxFunc::EmbeddedCurveDouble => { // Currently unsolvable here as we rely on an implementation in the backend. SimplifyResult::None } diff --git a/compiler/noirc_evaluator/src/ssa/ir/types.rs b/compiler/noirc_evaluator/src/ssa/ir/types.rs index bae06a805d0..f412def1e76 100644 --- a/compiler/noirc_evaluator/src/ssa/ir/types.rs +++ b/compiler/noirc_evaluator/src/ssa/ir/types.rs @@ -18,6 +18,28 @@ pub enum NumericType { NativeField, } +impl NumericType { + /// Returns the bit size of the provided numeric type. + pub(crate) fn bit_size(self: &NumericType) -> u32 { + match self { + NumericType::NativeField => FieldElement::max_num_bits(), + NumericType::Unsigned { bit_size } | NumericType::Signed { bit_size } => *bit_size, + } + } + + /// Returns true if the given Field value is within the numeric limits + /// for the current NumericType. 
+ pub(crate) fn value_is_within_limits(self, field: FieldElement) -> bool { + match self { + NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => { + let max = 2u128.pow(bit_size) - 1; + field <= max.into() + } + NumericType::NativeField => true, + } + } +} + /// All types representable in the IR. #[derive(Clone, Debug, PartialEq, Eq, Hash, Ord, PartialOrd)] pub(crate) enum Type { @@ -68,6 +90,18 @@ impl Type { Type::Numeric(NumericType::NativeField) } + /// Returns the bit size of the provided numeric type. + /// + /// # Panics + /// + /// Panics if `self` is not a [`Type::Numeric`] + pub(crate) fn bit_size(&self) -> u32 { + match self { + Type::Numeric(numeric_type) => numeric_type.bit_size(), + other => panic!("bit_size: Expected numeric type, found {other}"), + } + } + /// Returns the size of the element type for this array/slice. /// The size of a type is defined as representing how many Fields are needed /// to represent the type. This is 1 for every primitive type, and is the number of fields @@ -105,7 +139,7 @@ impl Type { } pub(crate) fn is_nested_slice(&self) -> bool { - if let Type::Slice(element_types) = self { + if let Type::Slice(element_types) | Type::Array(element_types, _) = self { element_types.as_ref().iter().any(|typ| typ.contains_slice_element()) } else { false @@ -122,20 +156,6 @@ impl Type { } } -impl NumericType { - /// Returns true if the given Field value is within the numeric limits - /// for the current NumericType. - pub(crate) fn value_is_within_limits(self, field: FieldElement) -> bool { - match self { - NumericType::Signed { bit_size } | NumericType::Unsigned { bit_size } => { - let max = 2u128.pow(bit_size) - 1; - field <= max.into() - } - NumericType::NativeField => true, - } - } -} - /// Composite Types are essentially flattened struct or tuple types. /// Array types may have these as elements where each flattened field is /// included in the array sequentially. 
diff --git a/compiler/noirc_evaluator/src/ssa/opt/array_use.rs b/compiler/noirc_evaluator/src/ssa/opt/array_use.rs index cfa97cee551..0bb8b0112b6 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/array_use.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/array_use.rs @@ -14,6 +14,7 @@ impl Ssa { /// Map arrays with the last instruction that uses it /// For this we simply process all the instructions in execution order /// and update the map whenever there is a match + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn find_last_array_uses(&self) -> HashMap { let mut array_use = HashMap::default(); for func in self.functions.values() { diff --git a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs index cd3a509a62e..a3608f89612 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/assert_constant.rs @@ -21,6 +21,7 @@ impl Ssa { /// seen by loop unrolling. Furthermore, this pass cannot be a part of loop unrolling /// since we must go through every instruction to find all references to `assert_constant` /// while loop unrolling only touches blocks with loops in them. 
+ #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn evaluate_assert_constant(mut self) -> Result { for function in self.functions.values_mut() { for block in function.reachable_blocks() { diff --git a/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs b/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs new file mode 100644 index 00000000000..8a903cbd87b --- /dev/null +++ b/compiler/noirc_evaluator/src/ssa/opt/bubble_up_constrains.rs @@ -0,0 +1,153 @@ +use std::collections::HashMap; + +use crate::ssa::{ + ir::instruction::{Instruction, InstructionId}, + ssa_gen::Ssa, +}; + +impl Ssa { + /// A simple SSA pass to go through each instruction and move every `Instruction::Constrain` to immediately + /// after when all of its inputs are available. + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) fn bubble_up_constrains(mut self) -> Ssa { + for function in self.functions.values_mut() { + for block in function.reachable_blocks() { + let instructions = function.dfg[block].take_instructions(); + let mut filtered_instructions = Vec::with_capacity(instructions.len()); + + // Multiple constrains can bubble up to sit under a single instruction. We want to maintain the ordering of these constraints, + // so we need to keep track of how many constraints are attached to a given instruction. + // Some assertions don't operate on instruction results, so we use Option so we also track the None case + let mut inserted_at_instruction: HashMap, usize> = + HashMap::with_capacity(instructions.len()); + + let dfg = &function.dfg; + for instruction in instructions { + let (lhs, rhs) = match dfg[instruction] { + Instruction::Constrain(lhs, rhs, ..) 
=> (lhs, rhs), + _ => { + filtered_instructions.push(instruction); + continue; + } + }; + + let last_instruction_that_creates_inputs = filtered_instructions + .iter() + .rev() + .position(|&instruction_id| { + let results = dfg.instruction_results(instruction_id).to_vec(); + results.contains(&lhs) || results.contains(&rhs) + }) + // We iterate through the previous instructions in reverse order so the index is from the + // back of the vector + .map(|reversed_index| filtered_instructions.len() - reversed_index - 1); + + let insertion_index = last_instruction_that_creates_inputs + .map(|index| { + // We want to insert just after the last instruction that creates the inputs + index + 1 + }) + // If it doesn't depend on the previous instructions, then we insert at the start + .unwrap_or_default(); + + let already_inserted_for_this_instruction = inserted_at_instruction + .entry( + last_instruction_that_creates_inputs + .map(|index| filtered_instructions[index]), + ) + .or_default(); + + filtered_instructions.insert( + insertion_index + *already_inserted_for_this_instruction, + instruction, + ); + + *already_inserted_for_this_instruction += 1; + } + + *function.dfg[block].instructions_mut() = filtered_instructions; + } + } + self + } +} + +#[cfg(test)] +mod test { + use crate::ssa::{ + function_builder::FunctionBuilder, + ir::{ + function::RuntimeType, + instruction::{Binary, BinaryOp, Instruction}, + map::Id, + types::Type, + }, + }; + + #[test] + fn check_bubble_up_constrains() { + // fn main f0 { + // b0(v0: Field): + // v1 = add v0, Field 1 + // v2 = add v1, Field 1 + // constrain v0 == Field 1 'With message' + // constrain v2 == Field 3 + // constrain v0 == Field 1 + // constrain v1 == Field 2 + // constrain v1 == Field 2 'With message' + // } + // + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::field()); + + let one = 
builder.field_constant(1u128); + let two = builder.field_constant(2u128); + let three = builder.field_constant(3u128); + + let v1 = builder.insert_binary(v0, BinaryOp::Add, one); + let v2 = builder.insert_binary(v1, BinaryOp::Add, one); + builder.insert_constrain(v0, one, Some("With message".to_string())); + builder.insert_constrain(v2, three, None); + builder.insert_constrain(v0, one, None); + builder.insert_constrain(v1, two, None); + builder.insert_constrain(v1, two, Some("With message".to_string())); + builder.terminate_with_return(vec![]); + + let ssa = builder.finish(); + + // Expected output: + // + // fn main f0 { + // b0(v0: Field): + // constrain v0 == Field 1 'With message' + // constrain v0 == Field 1 + // v1 = add v0, Field 1 + // constrain v1 == Field 2 + // constrain v1 == Field 2 'With message' + // v2 = add v1, Field 1 + // constrain v2 == Field 3 + // } + // + let ssa = ssa.bubble_up_constrains(); + let main = ssa.main(); + let block = &main.dfg[main.entry_block()]; + assert_eq!(block.instructions().len(), 7); + + let expected_instructions = vec![ + Instruction::Constrain(v0, one, Some("With message".to_string())), + Instruction::Constrain(v0, one, None), + Instruction::Binary(Binary { lhs: v0, rhs: one, operator: BinaryOp::Add }), + Instruction::Constrain(v1, two, None), + Instruction::Constrain(v1, two, Some("With message".to_string())), + Instruction::Binary(Binary { lhs: v1, rhs: one, operator: BinaryOp::Add }), + Instruction::Constrain(v2, three, None), + ]; + + for (index, instruction) in block.instructions().iter().enumerate() { + assert_eq!(&main.dfg[*instruction], &expected_instructions[index]); + } + } +} diff --git a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs index f48e6f2a129..addaee3ba8d 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/constant_folding.rs @@ -40,6 +40,7 @@ impl Ssa { /// Performs 
constant folding on each instruction. /// /// See [`constant_folding`][self] module for more information. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn fold_constants(mut self) -> Ssa { for function in self.functions.values_mut() { constant_fold(function); @@ -183,7 +184,7 @@ mod test { function_builder::FunctionBuilder, ir::{ function::RuntimeType, - instruction::{BinaryOp, Instruction, TerminatorInstruction}, + instruction::{Binary, BinaryOp, Instruction, TerminatorInstruction}, map::Id, types::Type, value::{Value, ValueId}, @@ -246,6 +247,117 @@ mod test { } } + #[test] + fn redundant_truncation() { + // fn main f0 { + // b0(v0: u16, v1: u16): + // v2 = div v0, v1 + // v3 = truncate v2 to 8 bits, max_bit_size: 16 + // return v3 + // } + // + // After constructing this IR, we set the value of v1 to 2^8. + // The expected return afterwards should be v2. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::unsigned(16)); + let v1 = builder.add_parameter(Type::unsigned(16)); + + // Note that this constant guarantees that `v0/constant < 2^8`. We then do not need to truncate the result. 
+ let constant = 2_u128.pow(8); + let constant = builder.numeric_constant(constant, Type::field()); + + let v2 = builder.insert_binary(v0, BinaryOp::Div, v1); + let v3 = builder.insert_truncate(v2, 8, 16); + builder.terminate_with_return(vec![v3]); + + let mut ssa = builder.finish(); + let main = ssa.main_mut(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); // The final return is not counted + + // Expected output: + // + // fn main f0 { + // b0(Field 2: Field): + // return Field 9 + // } + main.dfg.set_value_from_id(v1, constant); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + + println!("{ssa}"); + + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 1); + let instruction = &main.dfg[instructions[0]]; + + assert_eq!( + instruction, + &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + ); + } + + #[test] + fn non_redundant_truncation() { + // fn main f0 { + // b0(v0: u16, v1: u16): + // v2 = div v0, v1 + // v3 = truncate v2 to 8 bits, max_bit_size: 16 + // return v3 + // } + // + // After constructing this IR, we set the value of v1 to 2^8 - 1. + // This should not result in the truncation being removed. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::unsigned(16)); + let v1 = builder.add_parameter(Type::unsigned(16)); + + // Note that this constant does not guarantee that `v0/constant < 2^8`. We must then truncate the result. 
+ let constant = 2_u128.pow(8) - 1; + let constant = builder.numeric_constant(constant, Type::field()); + + let v2 = builder.insert_binary(v0, BinaryOp::Div, v1); + let v3 = builder.insert_truncate(v2, 8, 16); + builder.terminate_with_return(vec![v3]); + + let mut ssa = builder.finish(); + let main = ssa.main_mut(); + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); // The final return is not counted + + // Expected output: + // + // fn main f0 { + // b0(v0: u16, Field 255: Field): + // v5 = div v0, Field 255 + // v6 = truncate v5 to 8 bits, max_bit_size: 16 + // return v6 + // } + main.dfg.set_value_from_id(v1, constant); + + let ssa = ssa.fold_constants(); + let main = ssa.main(); + + let instructions = main.dfg[main.entry_block()].instructions(); + assert_eq!(instructions.len(), 2); + + assert_eq!( + &main.dfg[instructions[0]], + &Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Div, rhs: constant }) + ); + assert_eq!( + &main.dfg[instructions[1]], + &Instruction::Truncate { value: ValueId::test_new(5), bit_size: 8, max_bit_size: 16 } + ); + } + #[test] fn arrays_elements_are_updated() { // fn main f0 { @@ -279,11 +391,11 @@ mod test { let return_value_id = match entry_block.unwrap_terminator() { TerminatorInstruction::Return { return_values, .. } => return_values[0], - _ => unreachable!(), + _ => unreachable!("Should have terminator instruction"), }; let return_element = match &main.dfg[return_value_id] { Value::Array { array, .. } => array[0], - _ => unreachable!(), + _ => unreachable!("Return type should be array"), }; // The return element is expected to refer to the new add instruction result. 
assert_eq!(main.dfg.resolve(new_add_instr_result), main.dfg.resolve(return_element)); @@ -292,7 +404,7 @@ mod test { #[test] fn instruction_deduplication() { // fn main f0 { - // b0(v0: Field): + // b0(v0: u16): // v1 = cast v0 as u32 // v2 = cast v0 as u32 // constrain v1 v2 @@ -307,7 +419,7 @@ mod test { // Compiling main let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - let v0 = builder.add_parameter(Type::field()); + let v0 = builder.add_parameter(Type::unsigned(16)); let v1 = builder.insert_cast(v0, Type::unsigned(32)); let v2 = builder.insert_cast(v0, Type::unsigned(32)); @@ -321,7 +433,7 @@ mod test { // Expected output: // // fn main f0 { - // b0(v0: Field): + // b0(v0: u16): // v1 = cast v0 as u32 // } let ssa = ssa.fold_constants(); @@ -331,6 +443,69 @@ mod test { assert_eq!(instructions.len(), 1); let instruction = &main.dfg[instructions[0]]; - assert_eq!(instruction, &Instruction::Cast(ValueId::test_new(0), Type::unsigned(32))); + assert_eq!(instruction, &Instruction::Cast(v0, Type::unsigned(32))); + } + + #[test] + fn constraint_decomposition() { + // fn main f0 { + // b0(v0: u1, v1: u1, v2: u1): + // v3 = mul v0 v1 + // v4 = not v2 + // v5 = mul v3 v4 + // constrain v4 u1 1 + // } + // + // When constructing this IR, we should automatically decompose the constraint to be in terms of `v0`, `v1` and `v2`. + // + // The mul instructions are retained and will be removed in the dead instruction elimination pass. + let main_id = Id::test_new(0); + + // Compiling main + let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); + let v0 = builder.add_parameter(Type::bool()); + let v1 = builder.add_parameter(Type::bool()); + let v2 = builder.add_parameter(Type::bool()); + + let v3 = builder.insert_binary(v0, BinaryOp::Mul, v1); + let v4 = builder.insert_not(v2); + let v5 = builder.insert_binary(v3, BinaryOp::Mul, v4); + + // This constraint is automatically decomposed when it is inserted. 
+ let v_true = builder.numeric_constant(true, Type::bool()); + builder.insert_constrain(v5, v_true, None); + + let v_false = builder.numeric_constant(false, Type::bool()); + + // Expected output: + // + // fn main f0 { + // b0(v0: u1, v1: u1, v2: u1): + // v3 = mul v0 v1 + // v4 = not v2 + // v5 = mul v3 v4 + // constrain v0 u1 1 + // constrain v1 u1 1 + // constrain v2 u1 0 + // } + + let ssa = builder.finish(); + let main = ssa.main(); + let instructions = main.dfg[main.entry_block()].instructions(); + + assert_eq!(instructions.len(), 6); + + assert_eq!( + main.dfg[instructions[0]], + Instruction::Binary(Binary { lhs: v0, operator: BinaryOp::Mul, rhs: v1 }) + ); + assert_eq!(main.dfg[instructions[1]], Instruction::Not(v2)); + assert_eq!( + main.dfg[instructions[2]], + Instruction::Binary(Binary { lhs: v3, operator: BinaryOp::Mul, rhs: v4 }) + ); + assert_eq!(main.dfg[instructions[3]], Instruction::Constrain(v0, v_true, None)); + assert_eq!(main.dfg[instructions[4]], Instruction::Constrain(v1, v_true, None)); + assert_eq!(main.dfg[instructions[5]], Instruction::Constrain(v2, v_false, None)); } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs index 62b335be1e2..b7f154397a6 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/defunctionalize.rs @@ -52,6 +52,7 @@ struct DefunctionalizationContext { } impl Ssa { + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn defunctionalize(mut self) -> Ssa { // Find all functions used as value that share the same signature let variants = find_variants(&self); diff --git a/compiler/noirc_evaluator/src/ssa/opt/die.rs b/compiler/noirc_evaluator/src/ssa/opt/die.rs index 492e96dc08c..f7d8adb5275 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/die.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/die.rs @@ -17,6 +17,7 @@ use crate::ssa::{ impl Ssa { /// Performs Dead Instruction 
Elimination (DIE) to remove any instructions with /// unused results. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn dead_instruction_elimination(mut self) -> Ssa { for function in self.functions.values_mut() { dead_instruction_elimination(function); @@ -53,7 +54,7 @@ struct Context { instructions_to_remove: HashSet, /// IncrementRc instructions must be revisited after the main DIE pass since - /// they are technically side-effectful but we stil want to remove them if their + /// they technically contain side-effects but we still want to remove them if their /// `value` parameter is not used elsewhere. increment_rc_instructions: Vec<(InstructionId, BasicBlockId)>, } diff --git a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs b/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs deleted file mode 100644 index f5e9598114c..00000000000 --- a/compiler/noirc_evaluator/src/ssa/opt/fill_internal_slices.rs +++ /dev/null @@ -1,764 +0,0 @@ -//! This module defines the internal slices data fill pass. -//! The purpose of this pass is to fill out nested slice values represented by SSA array values. -//! "Filling out" a nested slice specifically refers to making a nested slice's internal slice types -//! match up in their size. This pass is necessary for dynamic array operations to work in ACIR gen -//! as we need to have a known size for any memory operations. As slice types do not carry a size we -//! need to make sure all nested internal slices have the same size in order to accurately -//! read from or write to a nested slice. This pass ultimately attaches dummy data to any smaller internal slice types. -//! -//! A simple example: -//! If we have a slice of the type [[Field]] which is of length 2. The internal slices themselves -//! could be of different sizes, such as 3 and 4. An array operation on this nested slice would look -//! something like below: -//! 
array_get [Field 3, [Field 1, Field 1, Field 1], Field 4, [Field 2, Field 2, Field 2, Field 2]], index Field v0 -//! Will get translated into a new instruction like such: -//! array_get [Field 3, [Field 1, Field 1, Field 1, Field 0], Field 4, [Field 2, Field 2, Field 2, Field 2]], index Field v0 -//! -//! -//! TODO(#3188): Currently the pass only works on a single flattened block. This should be updated in followup work. -//! The steps of the pass are as follows: -//! - Process each instruction of the block to collect relevant slice size information. We want to find the maximum size that a nested slice -//! potentially could be. Slices can potentially be set to larger array values or used in intrinsics that increase or shorten their size. -//! - Track all array constants and compute an initial map of their nested slice sizes. The slice sizes map is simply a map of an SSA array value -//! to its array size and then any child slice values that may exist. -//! - We also track a map to resolve a starting array constant to its final possible array value. This map is updated on the appropriate instructions -//! such as ArraySet or any slice intrinsics. -//! - On an ArrayGet operation add the resulting value as a possible child of the original slice. In SSA we will reuse the same memory block -//! for the nested slice and must account for an internal slice being fetched and set to a larger value, otherwise we may have an out of bounds error. -//! Also set the resulting fetched value to have the same internal slice size map as the children of the original array used in the operation. -//! - On an ArraySet operation we set the resulting value to have the same slice sizes map as the original array used in the operation. Like the result of -//! an ArrayGet we need to also add the `value` for an ArraySet as a possible child slice of the original array. -//! 
- For slice intrinsics we set the resulting value to have the same slice sizes map as the original array the same way as we do in an ArraySet. -//! However, with a slice intrinsic we also increase the size for the respective slice intrinsics. -//! We do not decrement the size on intrinsics that could remove values from a slice. This is because we could potentially go back to the smaller slice size, -//! not fill in the appropriate dummies and then get an out of bounds error later when executing the ACIR. We always want to compute -//! what a slice maximum size could be. -//! - Now we need to add each instruction back except with the updated original array values. -//! - Resolve the original slice value to what its final value would be using the previously computed map. -//! - Find the max size as each layer of the recursive nested slice type. -//! For instance in the example above we have a slice of depth 2 with the max sizes of [2, 4]. -//! - Follow the slice type to check whether the SSA value is under the specified max size. If a slice value -//! is under the max size we then attach dummy data. -//! - Construct a final nested slice with the now attached dummy data and replace the original array in the previously -//! saved ArrayGet and ArraySet instructions. - -use crate::ssa::{ - ir::{ - basic_block::BasicBlockId, - dfg::CallStack, - function::{Function, RuntimeType}, - function_inserter::FunctionInserter, - instruction::{Instruction, InstructionId, Intrinsic}, - post_order::PostOrder, - types::Type, - value::{Value, ValueId}, - }, - ssa_gen::Ssa, -}; - -use acvm::FieldElement; -use fxhash::FxHashMap as HashMap; - -impl Ssa { - pub(crate) fn fill_internal_slices(mut self) -> Ssa { - for function in self.functions.values_mut() { - // This pass is only necessary for generating ACIR and thus we should not - // process Brillig functions. - // The pass is also currently only setup to handle a function with a single flattened block. 
- // For complex Brillig functions we can expect this pass to panic. - if function.runtime() == RuntimeType::Acir { - let databus = function.dfg.data_bus.clone(); - let mut context = Context::new(function); - context.process_blocks(); - // update the databus with the new array instructions - function.dfg.data_bus = databus.map_values(|t| context.inserter.resolve(t)); - } - } - self - } -} - -struct Context<'f> { - post_order: PostOrder, - inserter: FunctionInserter<'f>, - - /// Maps SSA array values representing a slice's contents to its updated array value - /// after an array set or a slice intrinsic operation. - /// Maps original value -> result - mapped_slice_values: HashMap, - - /// Maps an updated array value following an array operation to its previous value. - /// When used in conjunction with `mapped_slice_values` we form a two way map of all array - /// values being used in array operations. - /// Maps result -> original value - slice_parents: HashMap, -} - -impl<'f> Context<'f> { - fn new(function: &'f mut Function) -> Self { - let post_order = PostOrder::with_function(function); - let inserter = FunctionInserter::new(function); - - Context { - post_order, - inserter, - mapped_slice_values: HashMap::default(), - slice_parents: HashMap::default(), - } - } - - fn process_blocks(&mut self) { - let mut block_order = PostOrder::with_function(self.inserter.function).into_vec(); - block_order.reverse(); - for block in block_order { - self.process_block(block); - } - } - - fn process_block(&mut self, block: BasicBlockId) { - // Fetch SSA values potentially with internal slices - let instructions = self.inserter.function.dfg[block].take_instructions(); - - // Values containing nested slices to be replaced - let mut slice_values = Vec::new(); - // Maps SSA array ID representing slice contents to its length and a list of its potential internal slices - // This map is constructed once for an array constant and is then updated - // according to the rules in 
`collect_slice_information`. - let mut slice_sizes: HashMap)> = HashMap::default(); - - // Update the slice sizes map to help find the potential max size of each nested slice. - for instruction in instructions.iter() { - self.collect_slice_information(*instruction, &mut slice_values, &mut slice_sizes); - } - - // Add back every instruction with the updated nested slices. - for instruction in instructions { - self.push_updated_instruction(instruction, &slice_values, &slice_sizes, block); - } - - self.inserter.map_terminator_in_place(block); - } - - /// Determine how the slice sizes map needs to be updated according to the provided instruction. - fn collect_slice_information( - &mut self, - instruction: InstructionId, - slice_values: &mut Vec, - slice_sizes: &mut HashMap)>, - ) { - let results = self.inserter.function.dfg.instruction_results(instruction); - match &self.inserter.function.dfg[instruction] { - Instruction::ArrayGet { array, .. } => { - let array_typ = self.inserter.function.dfg.type_of_value(*array); - let array_value = &self.inserter.function.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. - if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() - { - slice_values.push(*array); - // Initial insertion into the slice sizes map - // Any other insertions should only occur if the value is already - // a part of the map. 
- self.compute_slice_sizes(*array, slice_sizes); - } - - let res_typ = self.inserter.function.dfg.type_of_value(results[0]); - if res_typ.contains_slice_element() { - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - // Include the result in the parent array potential children - // If the result has internal slices and is called in an array set - // we could potentially have a new larger slice which we need to account for - inner_sizes.1.push(results[0]); - self.slice_parents.insert(results[0], *array); - - let inner_sizes_iter = inner_sizes.1.clone(); - for slice_value in inner_sizes_iter { - let inner_slice = slice_sizes.get(&slice_value).unwrap_or_else(|| { - panic!("ICE: should have inner slice set for {slice_value}") - }); - slice_sizes.insert(results[0], inner_slice.clone()); - if slice_value != results[0] { - self.mapped_slice_values.insert(slice_value, results[0]); - } - } - } - } - } - Instruction::ArraySet { array, value, .. } => { - let array_typ = self.inserter.function.dfg.type_of_value(*array); - let array_value = &self.inserter.function.dfg[*array]; - // If we have an SSA value containing nested slices we should mark it - // as a slice that potentially requires to be filled with dummy data. - if matches!(array_value, Value::Array { .. }) && array_typ.contains_slice_element() - { - slice_values.push(*array); - // Initial insertion into the slice sizes map - // Any other insertions should only occur if the value is already - // a part of the map. 
- self.compute_slice_sizes(*array, slice_sizes); - } - - let value_typ = self.inserter.function.dfg.type_of_value(*value); - if value_typ.contains_slice_element() { - self.compute_slice_sizes(*value, slice_sizes); - - let inner_sizes = slice_sizes.get_mut(array).expect("ICE expected slice sizes"); - inner_sizes.1.push(*value); - } - - if let Some(inner_sizes) = slice_sizes.get_mut(array) { - let inner_sizes = inner_sizes.clone(); - - slice_sizes.insert(results[0], inner_sizes); - - self.mapped_slice_values.insert(*array, results[0]); - self.slice_parents.insert(results[0], *array); - } - } - Instruction::Call { func, arguments } => { - let func = &self.inserter.function.dfg[*func]; - if let Value::Intrinsic(intrinsic) = func { - let (argument_index, result_index) = match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SlicePopBack - | Intrinsic::SliceInsert - | Intrinsic::SliceRemove => (1, 1), - // `pop_front` returns the popped element, and then the respective slice. - // This means in the case of a slice with structs, the result index of the popped slice - // will change depending on the number of elements in the struct. - // For example, a slice with four elements will look as such in SSA: - // v3, v4, v5, v6, v7, v8 = call slice_pop_front(v1, v2) - // where v7 is the slice length and v8 is the popped slice itself. - Intrinsic::SlicePopFront => (1, results.len() - 1), - _ => return, - }; - let slice_contents = arguments[argument_index]; - match intrinsic { - Intrinsic::SlicePushBack - | Intrinsic::SlicePushFront - | Intrinsic::SliceInsert => { - for arg in &arguments[(argument_index + 1)..] 
{ - let element_typ = self.inserter.function.dfg.type_of_value(*arg); - if element_typ.contains_slice_element() { - slice_values.push(*arg); - self.compute_slice_sizes(*arg, slice_sizes); - } - } - if let Some(inner_sizes) = slice_sizes.get_mut(&slice_contents) { - inner_sizes.0 += 1; - - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); - - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - self.slice_parents.insert(results[result_index], slice_contents); - } - } - Intrinsic::SlicePopBack - | Intrinsic::SliceRemove - | Intrinsic::SlicePopFront => { - // We do not decrement the size on intrinsics that could remove values from a slice. - // This is because we could potentially go back to the smaller slice and not fill in dummies. - // This pass should be tracking the potential max that a slice ***could be*** - if let Some(inner_sizes) = slice_sizes.get(&slice_contents) { - let inner_sizes = inner_sizes.clone(); - slice_sizes.insert(results[result_index], inner_sizes); - - self.mapped_slice_values - .insert(slice_contents, results[result_index]); - self.slice_parents.insert(results[result_index], slice_contents); - } - } - _ => {} - } - } - } - _ => {} - } - } - - fn push_updated_instruction( - &mut self, - instruction: InstructionId, - slice_values: &[ValueId], - slice_sizes: &HashMap)>, - block: BasicBlockId, - ) { - match &self.inserter.function.dfg[instruction] { - Instruction::ArrayGet { array, .. } | Instruction::ArraySet { array, .. 
} => { - if slice_values.contains(array) { - let (new_array_op_instr, call_stack) = - self.get_updated_array_op_instr(*array, slice_sizes, instruction); - self.inserter.push_instruction_value( - new_array_op_instr, - instruction, - block, - call_stack, - ); - } else { - self.inserter.push_instruction(instruction, block); - } - } - Instruction::Call { func: _, arguments } => { - let mut args_to_replace = Vec::new(); - for (i, arg) in arguments.iter().enumerate() { - let element_typ = self.inserter.function.dfg.type_of_value(*arg); - if slice_values.contains(arg) && element_typ.contains_slice_element() { - args_to_replace.push((i, *arg)); - } - } - if args_to_replace.is_empty() { - self.inserter.push_instruction(instruction, block); - } else { - // Using the original slice is ok to do as during collection of slice information - // we guarantee that only the arguments to slice intrinsic calls can be replaced. - let slice_contents = arguments[1]; - - let element_typ = self.inserter.function.dfg.type_of_value(arguments[1]); - let elem_depth = Self::compute_nested_slice_depth(&element_typ); - - let mut max_sizes = Vec::new(); - max_sizes.resize(elem_depth, 0); - // We want the max for the parent of the argument - let parent = self.resolve_slice_parent(slice_contents); - self.compute_slice_max_sizes(parent, slice_sizes, &mut max_sizes, 0); - - for (index, arg) in args_to_replace { - let element_typ = self.inserter.function.dfg.type_of_value(arg); - max_sizes.remove(0); - let new_array = - self.attach_slice_dummies(&element_typ, Some(arg), false, &max_sizes); - - let instruction_id = instruction; - let (instruction, call_stack) = - self.inserter.map_instruction(instruction_id); - let new_call_instr = match instruction { - Instruction::Call { func, mut arguments } => { - arguments[index] = new_array; - Instruction::Call { func, arguments } - } - _ => panic!("Expected call instruction"), - }; - self.inserter.push_instruction_value( - new_call_instr, - instruction_id, - 
block, - call_stack, - ); - } - } - } - _ => { - self.inserter.push_instruction(instruction, block); - } - } - } - - /// Construct an updated ArrayGet or ArraySet instruction where the array value - /// has been replaced by a newly filled in array according to the max internal - /// slice sizes. - fn get_updated_array_op_instr( - &mut self, - array_id: ValueId, - slice_sizes: &HashMap)>, - instruction: InstructionId, - ) -> (Instruction, CallStack) { - let mapped_slice_value = self.resolve_slice_value(array_id); - - let (current_size, _) = slice_sizes - .get(&mapped_slice_value) - .unwrap_or_else(|| panic!("should have slice sizes: {mapped_slice_value}")); - - let mut max_sizes = Vec::new(); - - let typ = self.inserter.function.dfg.type_of_value(array_id); - let depth = Self::compute_nested_slice_depth(&typ); - max_sizes.resize(depth, 0); - - max_sizes[0] = *current_size; - self.compute_slice_max_sizes(array_id, slice_sizes, &mut max_sizes, 1); - - let new_array = self.attach_slice_dummies(&typ, Some(array_id), true, &max_sizes); - - let instruction_id = instruction; - let (instruction, call_stack) = self.inserter.map_instruction(instruction_id); - let new_array_op_instr = match instruction { - Instruction::ArrayGet { index, .. } => { - Instruction::ArrayGet { array: new_array, index } - } - Instruction::ArraySet { index, value, .. 
} => { - Instruction::ArraySet { array: new_array, index, value } - } - _ => panic!("Expected array set"), - }; - - (new_array_op_instr, call_stack) - } - - fn attach_slice_dummies( - &mut self, - typ: &Type, - value: Option, - is_parent_slice: bool, - max_sizes: &[usize], - ) -> ValueId { - match typ { - Type::Numeric(_) => { - if let Some(value) = value { - self.inserter.resolve(value) - } else { - let zero = FieldElement::zero(); - self.inserter.function.dfg.make_constant(zero, Type::field()) - } - } - Type::Array(element_types, len) => { - if let Some(value) = value { - self.inserter.resolve(value) - } else { - let mut array = im::Vector::new(); - for _ in 0..*len { - for typ in element_types.iter() { - array.push_back(self.attach_slice_dummies(typ, None, false, max_sizes)); - } - } - self.inserter.function.dfg.make_array(array, typ.clone()) - } - } - Type::Slice(element_types) => { - let (current_size, max_sizes) = - max_sizes.split_first().expect("ICE: Missing internal slice max size"); - let mut max_size = *current_size; - if let Some(value) = value { - let mut slice = im::Vector::new(); - - let value = self.inserter.function.dfg[value].clone(); - let array = match value { - Value::Array { array, .. 
} => array, - _ => { - panic!("Expected an array value"); - } - }; - - if is_parent_slice { - max_size = array.len() / element_types.len(); - } - for i in 0..max_size { - for (element_index, element_type) in element_types.iter().enumerate() { - let index_usize = i * element_types.len() + element_index; - let valid_index = index_usize < array.len(); - let maybe_value = - if valid_index { Some(array[index_usize]) } else { None }; - slice.push_back(self.attach_slice_dummies( - element_type, - maybe_value, - false, - max_sizes, - )); - } - } - - self.inserter.function.dfg.make_array(slice, typ.clone()) - } else { - let mut slice = im::Vector::new(); - for _ in 0..max_size { - for typ in element_types.iter() { - slice.push_back(self.attach_slice_dummies(typ, None, false, max_sizes)); - } - } - self.inserter.function.dfg.make_array(slice, typ.clone()) - } - } - Type::Reference(_) => { - unreachable!("ICE: Generating dummy data for references is unsupported") - } - Type::Function => { - unreachable!("ICE: Generating dummy data for functions is unsupported") - } - } - } - - // This methods computes a map representing a nested slice. - // The method also automatically computes the given max slice size - // at each depth of the recursive type. 
- // For example if we had a next slice - fn compute_slice_sizes( - &self, - array_id: ValueId, - slice_sizes: &mut HashMap)>, - ) { - if let Value::Array { array, typ } = &self.inserter.function.dfg[array_id].clone() { - if let Type::Slice(_) = typ { - let element_size = typ.element_size(); - let len = array.len() / element_size; - let mut slice_value = (len, vec![]); - for value in array { - let typ = self.inserter.function.dfg.type_of_value(*value); - if let Type::Slice(_) = typ { - slice_value.1.push(*value); - self.compute_slice_sizes(*value, slice_sizes); - } - } - // Mark the correct max size based upon an array values internal structure - let mut max_size = 0; - for inner_value in slice_value.1.iter() { - let inner_slice = - slice_sizes.get(inner_value).expect("ICE: should have inner slice set"); - if inner_slice.0 > max_size { - max_size = inner_slice.0; - } - } - for inner_value in slice_value.1.iter() { - let inner_slice = - slice_sizes.get_mut(inner_value).expect("ICE: should have inner slice set"); - if inner_slice.0 < max_size { - inner_slice.0 = max_size; - } - } - slice_sizes.insert(array_id, slice_value); - } - } - } - - /// Determine the maximum possible size of an internal slice at each - /// layer of a nested slice. - /// - /// If the slice map is incorrectly formed the function will exceed - /// the type's nested slice depth and panic. 
- fn compute_slice_max_sizes( - &self, - array_id: ValueId, - slice_sizes: &HashMap)>, - max_sizes: &mut Vec, - depth: usize, - ) { - let array_id = self.resolve_slice_value(array_id); - let (current_size, inner_slices) = slice_sizes - .get(&array_id) - .unwrap_or_else(|| panic!("should have slice sizes: {array_id}")); - - if inner_slices.is_empty() { - return; - } - - let mut max = *current_size; - for inner_slice in inner_slices.iter() { - let inner_slice = &self.resolve_slice_value(*inner_slice); - - let (inner_size, _) = slice_sizes[inner_slice]; - if inner_size > max { - max = inner_size; - } - self.compute_slice_max_sizes(*inner_slice, slice_sizes, max_sizes, depth + 1); - } - - if max > max_sizes[depth] { - max_sizes[depth] = max; - } - } - - /// Compute the depth of nested slices in a given Type. - /// The depth follows the recursive type structure of a slice. - fn compute_nested_slice_depth(typ: &Type) -> usize { - let mut depth = 0; - if let Type::Slice(element_types) = typ { - depth += 1; - for typ in element_types.as_ref() { - depth += Self::compute_nested_slice_depth(typ); - } - } - depth - } - - /// Resolves a ValueId representing a slice's contents to its updated value. - /// If there is no resolved value for the supplied value, the value which - /// was passed to the method is returned. - fn resolve_slice_value(&self, array_id: ValueId) -> ValueId { - match self.mapped_slice_values.get(&array_id) { - Some(value) => self.resolve_slice_value(*value), - None => array_id, - } - } - - /// Resolves a ValueId representing a slice's contents to its previous value. - /// If there is no resolved parent value it means we have the original slice value - /// and the value which was passed to the method is returned. 
- fn resolve_slice_parent(&self, array_id: ValueId) -> ValueId { - match self.slice_parents.get(&array_id) { - Some(value) => self.resolve_slice_parent(*value), - None => array_id, - } - } -} - -#[cfg(test)] -mod tests { - - use std::rc::Rc; - - use acvm::FieldElement; - use im::vector; - - use crate::ssa::{ - function_builder::FunctionBuilder, - ir::{ - dfg::DataFlowGraph, - function::RuntimeType, - instruction::{BinaryOp, Instruction}, - map::Id, - types::Type, - value::ValueId, - }, - }; - - #[test] - fn test_simple_nested_slice() { - // We want to test that a nested slice with two internal slices of primitive types - // fills the smaller internal slice with dummy data to match the length of the - // larger internal slice. - - // Note that slices are a represented by a tuple of (length, contents). - // The type of the nested slice in this test is [[Field]]. - // - // This is the original SSA: - // acir fn main f0 { - // b0(v0: Field): - // v2 = lt v0, Field 2 - // constrain v2 == Field 1 'Index out of bounds' - // v11 = array_get [[Field 3, [Field 1, Field 1, Field 1]], [Field 4, [Field 2, Field 2, Field 2, Field 2]]], index Field v0 - // constrain v11 == Field 4 - // return - // } - - let main_id = Id::test_new(0); - let mut builder = FunctionBuilder::new("main".into(), main_id, RuntimeType::Acir); - - let main_v0 = builder.add_parameter(Type::field()); - - let two = builder.field_constant(2_u128); - // Every slice access checks against the dynamic slice length - let slice_access_check = builder.insert_binary(main_v0, BinaryOp::Lt, two); - let one = builder.field_constant(1_u128); - builder.insert_constrain(slice_access_check, one, Some("Index out of bounds".to_owned())); - - let field_element_type = Rc::new(vec![Type::field()]); - let inner_slice_contents_type = Type::Slice(field_element_type); - - let inner_slice_small_len = builder.field_constant(3_u128); - let inner_slice_small_contents = - builder.array_constant(vector![one, one, one], 
inner_slice_contents_type.clone()); - - let inner_slice_big_len = builder.field_constant(4_u128); - let inner_slice_big_contents = - builder.array_constant(vector![two, two, two, two], inner_slice_contents_type.clone()); - - let outer_slice_element_type = Rc::new(vec![Type::field(), inner_slice_contents_type]); - let outer_slice_type = Type::Slice(outer_slice_element_type); - - let outer_slice_contents = builder.array_constant( - vector![ - inner_slice_small_len, - inner_slice_small_contents, - inner_slice_big_len, - inner_slice_big_contents - ], - outer_slice_type, - ); - // Fetching the length of the second nested slice - // We must use a parameter to main as we do not want the array operation to be simplified out during SSA gen. The filling of internal slices - // is necessary for dynamic nested slices and thus we want to generate the SSA that ACIR gen would be converting. - let array_get_res = builder.insert_array_get(outer_slice_contents, main_v0, Type::field()); - - let four = builder.field_constant(4_u128); - builder.insert_constrain(array_get_res, four, None); - builder.terminate_with_return(vec![]); - - // Note that now the smaller internal slice should have extra dummy data that matches the larger internal slice's size. - // - // Expected SSA: - // acir fn main f0 { - // b0(v0: Field): - // v10 = lt v0, Field 2 - // constrain v10 == Field 1 'Index out of bounds' - // v18 = array_get [Field 3, [Field 1, Field 1, Field 1, Field 0], Field 4, [Field 2, Field 2, Field 2, Field 2]], index v0 - // constrain v18 == Field 4 - // return - // } - - let ssa = builder.finish().fill_internal_slices(); - - let func = ssa.main(); - let block_id = func.entry_block(); - - // Check the array get expression has replaced its nested slice with a new slice - // where the internal slice has dummy data attached to it. 
- let instructions = func.dfg[block_id].instructions(); - let array_id = instructions - .iter() - .find_map(|instruction| { - if let Instruction::ArrayGet { array, .. } = func.dfg[*instruction] { - Some(array) - } else { - None - } - }) - .expect("Should find array_get instruction"); - - let (array_constant, _) = - func.dfg.get_array_constant(array_id).expect("should have an array constant"); - - let inner_slice_small_len = func - .dfg - .get_numeric_constant(array_constant[0]) - .expect("should have a numeric constant"); - assert_eq!( - inner_slice_small_len, - FieldElement::from(3u128), - "The length of the smaller internal slice should be unchanged" - ); - - let (inner_slice_small_contents, _) = - func.dfg.get_array_constant(array_constant[1]).expect("should have an array constant"); - let small_capacity = inner_slice_small_contents.len(); - assert_eq!(small_capacity, 4, "The inner slice contents should contain dummy element"); - - compare_array_constants(&inner_slice_small_contents, &[1, 1, 1, 0], &func.dfg); - - let inner_slice_big_len = func - .dfg - .get_numeric_constant(array_constant[2]) - .expect("should have a numeric constant"); - assert_eq!( - inner_slice_big_len, - FieldElement::from(4u128), - "The length of the larger internal slice should be unchanged" - ); - - let (inner_slice_big_contents, _) = - func.dfg.get_array_constant(array_constant[3]).expect("should have an array constant"); - let big_capacity = inner_slice_big_contents.len(); - assert_eq!( - small_capacity, big_capacity, - "The length of both internal slice contents should be the same" - ); - - compare_array_constants(&inner_slice_big_contents, &[2u128; 4], &func.dfg); - } - - fn compare_array_constants( - got_list: &im::Vector, - expected_list: &[u128], - dfg: &DataFlowGraph, - ) { - for i in 0..got_list.len() { - let got_value = - dfg.get_numeric_constant(got_list[i]).expect("should have a numeric constant"); - assert_eq!( - got_value, - FieldElement::from(expected_list[i]), - "Value is 
different than expected" - ); - } - } -} diff --git a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs index f0ad610948c..6bdf2ab1c0a 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/flatten_cfg.rs @@ -163,6 +163,7 @@ impl Ssa { /// This pass will modify any instructions with side effects in particular, often multiplying /// them by jump conditions to maintain correctness even when all branches of a jmpif are inlined. /// For more information, see the module-level comment at the top of this file. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn flatten_cfg(mut self) -> Ssa { flatten_function_cfg(self.main_mut()); self @@ -640,8 +641,25 @@ impl<'f> Context<'f> { match instruction { Instruction::Constrain(lhs, rhs, message) => { // Replace constraint `lhs == rhs` with `condition * lhs == condition * rhs`. - let lhs = self.handle_constrain_arg_side_effects(lhs, condition, &call_stack); - let rhs = self.handle_constrain_arg_side_effects(rhs, condition, &call_stack); + + // Condition needs to be cast to argument type in order to multiply them together. 
+ let argument_type = self.inserter.function.dfg.type_of_value(lhs); + // Sanity check that we're not constraining non-primitive types + assert!(matches!(argument_type, Type::Numeric(_))); + + let casted_condition = self.insert_instruction( + Instruction::Cast(condition, argument_type), + call_stack.clone(), + ); + + let lhs = self.insert_instruction( + Instruction::binary(BinaryOp::Mul, lhs, casted_condition), + call_stack.clone(), + ); + let rhs = self.insert_instruction( + Instruction::binary(BinaryOp::Mul, rhs, casted_condition), + call_stack, + ); Instruction::Constrain(lhs, rhs, message) } @@ -672,90 +690,6 @@ impl<'f> Context<'f> { } } - /// Given the arguments of a constrain instruction, multiplying them by the branch's condition - /// requires special handling in the case of complex types. - fn handle_constrain_arg_side_effects( - &mut self, - argument: ValueId, - condition: ValueId, - call_stack: &CallStack, - ) -> ValueId { - let argument_type = self.inserter.function.dfg.type_of_value(argument); - - match &argument_type { - Type::Numeric(_) => { - // Condition needs to be cast to argument type in order to multiply them together. 
- let casted_condition = self.insert_instruction( - Instruction::Cast(condition, argument_type), - call_stack.clone(), - ); - - self.insert_instruction( - Instruction::binary(BinaryOp::Mul, argument, casted_condition), - call_stack.clone(), - ) - } - Type::Array(_, _) => { - self.handle_array_constrain_arg(argument_type, argument, condition, call_stack) - } - Type::Slice(_) => { - panic!("Cannot use slices directly in a constrain statement") - } - Type::Reference(_) => { - panic!("Cannot use references directly in a constrain statement") - } - Type::Function => { - panic!("Cannot use functions directly in a constrain statement") - } - } - } - - fn handle_array_constrain_arg( - &mut self, - typ: Type, - argument: ValueId, - condition: ValueId, - call_stack: &CallStack, - ) -> ValueId { - let mut new_array = im::Vector::new(); - - let (element_types, len) = match &typ { - Type::Array(elements, len) => (elements, *len), - _ => panic!("Expected array type"), - }; - - for i in 0..len { - for (element_index, element_type) in element_types.iter().enumerate() { - let index = ((i * element_types.len() + element_index) as u128).into(); - let index = self.inserter.function.dfg.make_constant(index, Type::field()); - - let typevars = Some(vec![element_type.clone()]); - - let mut get_element = |array, typevars| { - let get = Instruction::ArrayGet { array, index }; - self.inserter - .function - .dfg - .insert_instruction_and_results( - get, - self.inserter.function.entry_block(), - typevars, - CallStack::new(), - ) - .first() - }; - - let element = get_element(argument, typevars); - - new_array.push_back( - self.handle_constrain_arg_side_effects(element, condition, call_stack), - ); - } - } - - self.inserter.function.dfg.make_array(new_array, typ) - } - fn undo_stores_in_then_branch(&mut self, then_branch: &Branch) { for (address, store) in &then_branch.store_values { let address = *address; @@ -1168,7 +1102,7 @@ mod test { let main = ssa.main(); let ret = match 
main.dfg[main.entry_block()].terminator() { Some(TerminatorInstruction::Return { return_values, .. }) => return_values[0], - _ => unreachable!(), + _ => unreachable!("Should have terminator instruction"), }; let merged_values = get_all_constants_reachable_from_instruction(&main.dfg, ret); @@ -1539,7 +1473,7 @@ mod test { None => unreachable!("Expected constant 200 for return value"), } } - _ => unreachable!(), + _ => unreachable!("Should have terminator instruction"), } } } diff --git a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs index b4f12b2f897..776f22b2877 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/inlining.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/inlining.rs @@ -36,6 +36,7 @@ impl Ssa { /// changes. This is because if the function's id later becomes known by a later /// pass, we would need to re-run all of inlining anyway to inline it, so we might /// as well save the work for later instead of performing it twice. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn inline_functions(mut self) -> Ssa { self.functions = btree_map(get_entry_point_functions(&self), |entry_point| { let new_function = InlineContext::new(&self, entry_point).inline_all(&self); diff --git a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs index ce205c8d883..0a49ca4ecca 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mem2reg.rs @@ -86,6 +86,7 @@ use self::block::{Block, Expression}; impl Ssa { /// Attempts to remove any load instructions that recover values that are already available in /// scope, and attempts to remove stores that are subsequently redundant. 
+ #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn mem2reg(mut self) -> Ssa { for function in self.functions.values_mut() { let mut context = PerFunctionContext::new(function); diff --git a/compiler/noirc_evaluator/src/ssa/opt/mod.rs b/compiler/noirc_evaluator/src/ssa/opt/mod.rs index 95784194d28..71725422a7a 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/mod.rs @@ -5,10 +5,10 @@ //! Generally, these passes are also expected to minimize the final amount of instructions. mod array_use; mod assert_constant; +mod bubble_up_constrains; mod constant_folding; mod defunctionalize; mod die; -mod fill_internal_slices; pub(crate) mod flatten_cfg; mod inlining; mod mem2reg; diff --git a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs index d491afc3d26..a31def8fd98 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/simplify_cfg.rs @@ -29,6 +29,7 @@ impl Ssa { /// only 1 successor then (2) also will be applied. /// /// Currently, 1 and 4 are unimplemented. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn simplify_cfg(mut self) -> Self { for function in self.functions.values_mut() { simplify_function(function); diff --git a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs index 50c2f5b1524..645adb6b350 100644 --- a/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs +++ b/compiler/noirc_evaluator/src/ssa/opt/unrolling.rs @@ -7,7 +7,7 @@ //! b. If we have previously modified any of the blocks in the loop, //! restart from step 1 to refresh the context. //! c. If not, try to unroll the loop. If successful, remember the modified -//! blocks. If unsuccessfuly either error if the abort_on_error flag is set, +//! blocks. If unsuccessfully either error if the abort_on_error flag is set, //! 
or otherwise remember that the loop failed to unroll and leave it unmodified. //! //! Note that this pass also often creates superfluous jmp instructions in the @@ -36,6 +36,7 @@ use fxhash::FxHashMap as HashMap; impl Ssa { /// Unroll all loops in each SSA function. /// If any loop cannot be unrolled, it is left as-is or in a partially unrolled state. + #[tracing::instrument(level = "trace", skip(self))] pub(crate) fn unroll_loops(mut self) -> Result { for function in self.functions.values_mut() { // Loop unrolling in brillig can lead to a code explosion currently. This can diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs index 501a03bcb5c..0e155776545 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/context.rs @@ -270,11 +270,14 @@ impl<'a> FunctionContext<'a> { /// helper function which add instructions to the block computing the absolute value of the /// given signed integer input. When the input is negative, we return its two complement, and itself when it is positive. fn absolute_value_helper(&mut self, input: ValueId, sign: ValueId, bit_size: u32) -> ValueId { + assert_eq!(self.builder.type_of_value(sign), Type::bool()); + // We compute the absolute value of lhs - let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); let bit_width = self.builder.numeric_constant(FieldElement::from(2_i128.pow(bit_size)), Type::field()); - let sign_not = self.builder.insert_binary(one, BinaryOp::Sub, sign); + let sign_not = self.builder.insert_not(sign); + + // We use unsafe casts here, this is fine as we're casting to a `field` type. 
let as_field = self.builder.insert_cast(input, Type::field()); let sign_field = self.builder.insert_cast(sign, Type::field()); let positive_predicate = self.builder.insert_binary(sign_field, BinaryOp::Mul, as_field); @@ -310,12 +313,12 @@ impl<'a> FunctionContext<'a> { match operator { BinaryOpKind::Add | BinaryOpKind::Subtract => { // Result is computed modulo the bit size - let mut result = - self.builder.insert_truncate(result, bit_size, bit_size + 1); - result = self.builder.insert_cast(result, Type::unsigned(bit_size)); + let result = self.builder.insert_truncate(result, bit_size, bit_size + 1); + let result = + self.insert_safe_cast(result, Type::unsigned(bit_size), location); self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); - self.builder.insert_cast(result, result_type) + self.insert_safe_cast(result, result_type, location) } BinaryOpKind::Multiply => { // Result is computed modulo the bit size @@ -324,62 +327,126 @@ impl<'a> FunctionContext<'a> { result = self.builder.insert_truncate(result, bit_size, 2 * bit_size); self.check_signed_overflow(result, lhs, rhs, operator, bit_size, location); - self.builder.insert_cast(result, result_type) + self.insert_safe_cast(result, result_type, location) } - BinaryOpKind::ShiftLeft => { - unreachable!("shift is not supported for signed integer") + BinaryOpKind::ShiftLeft | BinaryOpKind::ShiftRight => { + self.check_shift_overflow(result, rhs, bit_size, location, true) } _ => unreachable!("operator {} should not overflow", operator), } } Type::Numeric(NumericType::Unsigned { bit_size }) => { - let op_name = match operator { - BinaryOpKind::Add => "add", - BinaryOpKind::Subtract => "subtract", - BinaryOpKind::Multiply => "multiply", - BinaryOpKind::ShiftLeft => "left shift", - _ => unreachable!("operator {} should not overflow", operator), - }; + let dfg = &self.builder.current_function.dfg; - if operator == BinaryOpKind::Multiply && bit_size == 1 { - result - } else if operator == 
BinaryOpKind::ShiftLeft { - self.check_left_shift_overflow(result, rhs, bit_size, location) - } else { - let message = format!("attempt to {} with overflow", op_name); - let range_constraint = Instruction::RangeCheck { - value: result, - max_bit_size: bit_size, - assert_message: Some(message), - }; - self.builder.set_location(location).insert_instruction(range_constraint, None); - result + let max_lhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(lhs); + let max_rhs_bits = self.builder.current_function.dfg.get_value_max_num_bits(rhs); + + match operator { + BinaryOpKind::Add => { + if std::cmp::max(max_lhs_bits, max_rhs_bits) < bit_size { + // `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. + return result; + } + + let message = "attempt to add with overflow".to_string(); + self.builder.set_location(location).insert_range_check( + result, + bit_size, + Some(message), + ); + } + BinaryOpKind::Subtract => { + if dfg.is_constant(lhs) && max_lhs_bits > max_rhs_bits { + // `lhs` is a fixed constant and `rhs` is restricted such that `lhs - rhs > 0` + // Note strict inequality as `rhs > lhs` while `max_lhs_bits == max_rhs_bits` is possible. + return result; + } + + let message = "attempt to subtract with overflow".to_string(); + self.builder.set_location(location).insert_range_check( + result, + bit_size, + Some(message), + ); + } + BinaryOpKind::Multiply => { + if bit_size == 1 || max_lhs_bits + max_rhs_bits <= bit_size { + // Either performing boolean multiplication (which cannot overflow), + // or `lhs` and `rhs` have both been casted up from smaller types and so cannot overflow. 
+ return result; + } + + let message = "attempt to multiply with overflow".to_string(); + self.builder.set_location(location).insert_range_check( + result, + bit_size, + Some(message), + ); + } + BinaryOpKind::ShiftLeft => { + if let Some(rhs_const) = dfg.get_numeric_constant(rhs) { + let bit_shift_size = rhs_const.to_u128() as u32; + + if max_lhs_bits + bit_shift_size <= bit_size { + // `lhs` has been casted up from a smaller type such that shifting it by a constant + // `rhs` is known not to exceed the maximum bit size. + return result; + } + } + + self.check_shift_overflow(result, rhs, bit_size, location, false); + } + + _ => unreachable!("operator {} should not overflow", operator), } + + result } _ => result, } } - /// Overflow checks for shift-left - /// We use Rust behavior for shift left: + /// Overflow checks for bit-shift + /// We use Rust behavior for bit-shift: /// If rhs is more or equal than the bit size, then we overflow - /// If not, we do not overflow and shift left with 0 when bits are falling out of the bit size - fn check_left_shift_overflow( + /// If not, we do not overflow and shift with 0 when bits are falling out of the bit size + fn check_shift_overflow( &mut self, result: ValueId, rhs: ValueId, bit_size: u32, location: Location, + is_signed: bool, ) -> ValueId { + let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); + let rhs = if is_signed { + self.insert_safe_cast(rhs, Type::unsigned(bit_size), location) + } else { + rhs + }; + // Bit-shift with a negative number is an overflow + if is_signed { + // We compute the sign of rhs. 
+ let half_width = self.builder.numeric_constant( + FieldElement::from(2_i128.pow(bit_size - 1)), + Type::unsigned(bit_size), + ); + let sign = self.builder.insert_binary(rhs, BinaryOp::Lt, half_width); + self.builder.set_location(location).insert_constrain( + sign, + one, + Some("attempt to bit-shift with overflow".to_string()), + ); + } + let max = self .builder .numeric_constant(FieldElement::from(bit_size as i128), Type::unsigned(bit_size)); let overflow = self.builder.insert_binary(rhs, BinaryOp::Lt, max); - let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); self.builder.set_location(location).insert_constrain( overflow, one, - Some("attempt to left shift with overflow".to_owned()), + Some("attempt to bit-shift with overflow".to_owned()), ); self.builder.insert_truncate(result, bit_size, bit_size + 1) } @@ -406,19 +473,18 @@ impl<'a> FunctionContext<'a> { location: Location, ) { let is_sub = operator == BinaryOpKind::Subtract; - let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); let half_width = self.builder.numeric_constant( FieldElement::from(2_i128.pow(bit_size - 1)), Type::unsigned(bit_size), ); // We compute the sign of the operands. 
The overflow checks for signed integers depends on these signs - let lhs_as_unsigned = self.builder.insert_cast(lhs, Type::unsigned(bit_size)); - let rhs_as_unsigned = self.builder.insert_cast(rhs, Type::unsigned(bit_size)); + let lhs_as_unsigned = self.insert_safe_cast(lhs, Type::unsigned(bit_size), location); + let rhs_as_unsigned = self.insert_safe_cast(rhs, Type::unsigned(bit_size), location); let lhs_sign = self.builder.insert_binary(lhs_as_unsigned, BinaryOp::Lt, half_width); let mut rhs_sign = self.builder.insert_binary(rhs_as_unsigned, BinaryOp::Lt, half_width); let message = if is_sub { // lhs - rhs = lhs + (-rhs) - rhs_sign = self.builder.insert_binary(one, BinaryOp::Sub, rhs_sign); + rhs_sign = self.builder.insert_not(rhs_sign); "attempt to subtract with overflow".to_string() } else { "attempt to add with overflow".to_string() @@ -444,29 +510,29 @@ impl<'a> FunctionContext<'a> { let product_field = self.builder.insert_binary(lhs_abs, BinaryOp::Mul, rhs_abs); // It must not already overflow the bit_size let message = "attempt to multiply with overflow".to_string(); - let size_overflow = Instruction::RangeCheck { - value: product_field, - max_bit_size: bit_size, - assert_message: Some(message.clone()), - }; - self.builder.set_location(location).insert_instruction(size_overflow, None); + self.builder.set_location(location).insert_range_check( + product_field, + bit_size, + Some(message.clone()), + ); let product = self.builder.insert_cast(product_field, Type::unsigned(bit_size)); // Then we check the signed product fits in a signed integer of bit_size-bits - let not_same = self.builder.insert_binary(one, BinaryOp::Sub, same_sign); + let not_same = self.builder.insert_not(same_sign); let not_same_sign_field = - self.builder.insert_cast(not_same, Type::unsigned(bit_size)); + self.insert_safe_cast(not_same, Type::unsigned(bit_size), location); let positive_maximum_with_offset = self.builder.insert_binary(half_width, BinaryOp::Add, not_same_sign_field); let 
product_overflow_check = self.builder.insert_binary(product, BinaryOp::Lt, positive_maximum_with_offset); + + let one = self.builder.numeric_constant(FieldElement::one(), Type::bool()); self.builder.set_location(location).insert_constrain( product_overflow_check, one, Some(message), ); } - BinaryOpKind::ShiftLeft => unreachable!("shift is not supported for signed integer"), _ => unreachable!("operator {} should not overflow", operator), } } @@ -482,19 +548,26 @@ impl<'a> FunctionContext<'a> { mut rhs: ValueId, location: Location, ) -> Values { + let result_type = self.builder.type_of_value(lhs); let mut result = match operator { BinaryOpKind::ShiftLeft => { - let result_type = self.builder.current_function.dfg.type_of_value(lhs); let bit_size = match result_type { Type::Numeric(NumericType::Signed { bit_size }) | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, - _ => unreachable!("ICE: Truncation attempted on non-integer"), + _ => unreachable!("ICE: left-shift attempted on non-integer"), }; self.builder.insert_wrapping_shift_left(lhs, rhs, bit_size) } - BinaryOpKind::ShiftRight => self.builder.insert_shift_right(lhs, rhs), + BinaryOpKind::ShiftRight => { + let bit_size = match result_type { + Type::Numeric(NumericType::Signed { bit_size }) + | Type::Numeric(NumericType::Unsigned { bit_size }) => bit_size, + _ => unreachable!("ICE: right-shift attempted on non-integer"), + }; + self.builder.insert_shift_right(lhs, rhs, bit_size) + } BinaryOpKind::Equal | BinaryOpKind::NotEqual - if matches!(self.builder.type_of_value(lhs), Type::Array(..)) => + if matches!(result_type, Type::Array(..)) => { return self.insert_array_equality(lhs, operator, rhs, location) } @@ -639,6 +712,29 @@ impl<'a> FunctionContext<'a> { reshaped_return_values } + /// Inserts a cast instruction at the end of the current block and returns the results + /// of the cast. 
+ /// + /// Compared to `self.builder.insert_cast`, this version will automatically truncate `value` to be a valid `typ`. + pub(super) fn insert_safe_cast( + &mut self, + mut value: ValueId, + typ: Type, + location: Location, + ) -> ValueId { + self.builder.set_location(location); + + // To ensure that `value` is a valid `typ`, we insert an `Instruction::Truncate` instruction beforehand if + // we're narrowing the type size. + let incoming_type_size = self.builder.type_of_value(value).bit_size(); + let target_type_size = typ.bit_size(); + if target_type_size < incoming_type_size { + value = self.builder.insert_truncate(value, target_type_size, incoming_type_size); + } + + self.builder.insert_cast(value, typ) + } + /// Create a const offset of an address for an array load or store pub(super) fn make_offset(&mut self, mut address: ValueId, offset: u128) -> ValueId { if offset != 0 { diff --git a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs index 2f9809799a5..782a10be036 100644 --- a/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs +++ b/compiler/noirc_evaluator/src/ssa/ssa_gen/mod.rs @@ -8,8 +8,8 @@ use context::SharedContext; use iter_extended::{try_vecmap, vecmap}; use noirc_errors::Location; use noirc_frontend::{ - monomorphization::ast::{self, Binary, Expression, Program}, - BinaryOpKind, Visibility, + monomorphization::ast::{self, Expression, Program}, + Visibility, }; use crate::{ @@ -194,12 +194,14 @@ impl<'a> FunctionContext<'a> { let typ = Self::convert_type(&array.typ).flatten(); Ok(match array.typ { - ast::Type::Array(_, _) => self.codegen_array(elements, typ[0].clone()), + ast::Type::Array(_, _) => { + self.codegen_array_checked(elements, typ[0].clone())? 
+ } ast::Type::Slice(_) => { let slice_length = self.builder.field_constant(array.contents.len() as u128); - - let slice_contents = self.codegen_array(elements, typ[1].clone()); + let slice_contents = + self.codegen_array_checked(elements, typ[1].clone())?; Tree::Branch(vec![slice_length.into(), slice_contents]) } _ => unreachable!( @@ -238,6 +240,18 @@ impl<'a> FunctionContext<'a> { self.codegen_array(elements, typ) } + // Codegen an array but make sure that we do not have a nested slice + fn codegen_array_checked( + &mut self, + elements: Vec, + typ: Type, + ) -> Result { + if typ.is_nested_slice() { + return Err(RuntimeError::NestedSlice { call_stack: self.builder.get_call_stack() }); + } + Ok(self.codegen_array(elements, typ)) + } + /// Codegen an array by allocating enough space for each element and inserting separate /// store instructions until each element is stored. The store instructions will be separated /// by add instructions to calculate the new offset address to store to next. @@ -441,8 +455,8 @@ impl<'a> FunctionContext<'a> { fn codegen_cast(&mut self, cast: &ast::Cast) -> Result { let lhs = self.codegen_non_tuple_expression(&cast.lhs)?; let typ = Self::convert_non_tuple_type(&cast.r#type); - self.builder.set_location(cast.location); - Ok(self.builder.insert_cast(lhs, typ).into()) + + Ok(self.insert_safe_cast(lhs, typ, cast.location).into()) } /// Codegens a for loop, creating three new blocks in the process. @@ -660,42 +674,10 @@ impl<'a> FunctionContext<'a> { location: Location, assert_message: Option, ) -> Result { - match expr { - // If we're constraining an equality to be true then constrain the two sides directly. - Expression::Binary(Binary { lhs, operator: BinaryOpKind::Equal, rhs, .. 
}) => { - let lhs = self.codegen_non_tuple_expression(lhs)?; - let rhs = self.codegen_non_tuple_expression(rhs)?; - if matches!(self.builder.type_of_value(lhs), Type::Array(..)) { - // Expand constraints on array equality so that: - // assert(a == b); - // becomes - // let r = a == b; - // assert(r); - let expr = self - .insert_binary(lhs, BinaryOpKind::Equal, rhs, location) - .into_leaf() - .eval(self); - let true_literal = self.builder.numeric_constant(true, Type::bool()); - self.builder.set_location(location).insert_constrain( - expr, - true_literal, - assert_message, - ); - } else { - self.builder.set_location(location).insert_constrain(lhs, rhs, assert_message); - } - } + let expr = self.codegen_non_tuple_expression(expr)?; + let true_literal = self.builder.numeric_constant(true, Type::bool()); + self.builder.set_location(location).insert_constrain(expr, true_literal, assert_message); - _ => { - let expr = self.codegen_non_tuple_expression(expr)?; - let true_literal = self.builder.numeric_constant(true, Type::bool()); - self.builder.set_location(location).insert_constrain( - expr, - true_literal, - assert_message, - ); - } - } Ok(Self::unit_value()) } diff --git a/compiler/noirc_frontend/Cargo.toml b/compiler/noirc_frontend/Cargo.toml index 6f3c35a814a..80d767f7f2c 100644 --- a/compiler/noirc_frontend/Cargo.toml +++ b/compiler/noirc_frontend/Cargo.toml @@ -22,7 +22,9 @@ serde.workspace = true rustc-hash = "1.1.0" small-ord-set = "0.1.3" regex = "1.9.1" +tracing.workspace = true [dev-dependencies] strum = "0.24" strum_macros = "0.24" +tempfile.workspace = true diff --git a/compiler/noirc_frontend/src/ast/mod.rs b/compiler/noirc_frontend/src/ast/mod.rs index 5c10d3fe8f0..d9af5893024 100644 --- a/compiler/noirc_frontend/src/ast/mod.rs +++ b/compiler/noirc_frontend/src/ast/mod.rs @@ -257,7 +257,20 @@ impl UnresolvedTypeExpression { BinaryOpKind::Multiply => BinaryTypeOperator::Multiplication, BinaryOpKind::Divide => BinaryTypeOperator::Division, 
BinaryOpKind::Modulo => BinaryTypeOperator::Modulo, - _ => unreachable!(), // impossible via operator_allowed check + + BinaryOpKind::Equal + | BinaryOpKind::NotEqual + | BinaryOpKind::Less + | BinaryOpKind::LessEqual + | BinaryOpKind::Greater + | BinaryOpKind::GreaterEqual + | BinaryOpKind::And + | BinaryOpKind::Or + | BinaryOpKind::Xor + | BinaryOpKind::ShiftRight + | BinaryOpKind::ShiftLeft => { + unreachable!("impossible via `operator_allowed` check") + } }; Ok(UnresolvedTypeExpression::BinaryOperation(lhs, op, rhs, expr.span)) } diff --git a/compiler/noirc_frontend/src/debug/mod.rs b/compiler/noirc_frontend/src/debug/mod.rs index 940bd4138b4..0484b6cf8b2 100644 --- a/compiler/noirc_frontend/src/debug/mod.rs +++ b/compiler/noirc_frontend/src/debug/mod.rs @@ -531,16 +531,16 @@ pub fn create_prologue_program(n: u32) -> String { format!( r#" #[oracle(__debug_member_assign_{n})] - unconstrained fn __debug_member_assign_oracle_{n}( + unconstrained fn __debug_oracle_member_assign_{n}( _var_id: u32, _value: T, {var_sig} ) {{}} - unconstrained fn __debug_member_assign_inner_{n}( + unconstrained fn __debug_inner_member_assign_{n}( var_id: u32, value: T, {var_sig} ) {{ - __debug_member_assign_oracle_{n}(var_id, value, {vars}); + __debug_oracle_member_assign_{n}(var_id, value, {vars}); }} pub fn __debug_member_assign_{n}(var_id: u32, value: T, {var_sig}) {{ - __debug_member_assign_inner_{n}(var_id, value, {vars}); + __debug_inner_member_assign_{n}(var_id, value, {vars}); }} "# diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs index ae061792125..c768ea96f8f 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_crate.rs @@ -4,7 +4,7 @@ use crate::graph::CrateId; use crate::hir::def_map::{CrateDefMap, LocalModuleId, ModuleId}; use crate::hir::resolution::errors::ResolverError; -use 
crate::hir::resolution::import::{resolve_imports, ImportDirective}; +use crate::hir::resolution::import::{resolve_import, ImportDirective}; use crate::hir::resolution::resolver::Resolver; use crate::hir::resolution::{ collect_impls, collect_trait_impls, path_resolver, resolve_free_functions, resolve_globals, @@ -20,8 +20,8 @@ use crate::node_interner::{FuncId, NodeInterner, StmtId, StructId, TraitId, Type use crate::parser::{ParserError, SortedModule}; use crate::{ ExpressionKind, Ident, LetStatement, Literal, NoirFunction, NoirStruct, NoirTrait, - NoirTypeAlias, Path, PathKind, Type, UnresolvedGenerics, UnresolvedTraitConstraint, - UnresolvedType, + NoirTypeAlias, Path, PathKind, Type, TypeBindings, UnresolvedGenerics, + UnresolvedTraitConstraint, UnresolvedType, }; use fm::FileId; use iter_extended::vecmap; @@ -90,6 +90,7 @@ pub struct UnresolvedTraitImpl { pub file_id: FileId, pub module_id: LocalModuleId, pub trait_id: Option, + pub trait_generics: Vec, pub trait_path: Path, pub object_type: UnresolvedType, pub methods: UnresolvedFunctions, @@ -259,37 +260,33 @@ impl DefCollector { ); } - // Resolve unresolved imports collected from the crate - let (resolved, unresolved_imports) = - resolve_imports(crate_id, def_collector.collected_imports, &context.def_maps); - - { - let current_def_map = context.def_maps.get(&crate_id).unwrap(); - errors.extend(vecmap(unresolved_imports, |(error, module_id)| { - let file_id = current_def_map.file_id(module_id); - let error = DefCollectorErrorKind::PathResolutionError(error); - (error.into(), file_id) - })); - }; - - // Populate module namespaces according to the imports used - let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); - for resolved_import in resolved { - let name = resolved_import.name; - for ns in resolved_import.resolved_namespace.iter_defs() { - let result = current_def_map.modules[resolved_import.module_scope.0].import( - name.clone(), - ns, - resolved_import.is_prelude, - ); - - if let 
Err((first_def, second_def)) = result { - let err = DefCollectorErrorKind::Duplicate { - typ: DuplicateType::Import, - first_def, - second_def, - }; - errors.push((err.into(), root_file_id)); + // Resolve unresolved imports collected from the crate, one by one. + for collected_import in def_collector.collected_imports { + match resolve_import(crate_id, collected_import, &context.def_maps) { + Ok(resolved_import) => { + // Populate module namespaces according to the imports used + let current_def_map = context.def_maps.get_mut(&crate_id).unwrap(); + + let name = resolved_import.name; + for ns in resolved_import.resolved_namespace.iter_defs() { + let result = current_def_map.modules[resolved_import.module_scope.0] + .import(name.clone(), ns, resolved_import.is_prelude); + + if let Err((first_def, second_def)) = result { + let err = DefCollectorErrorKind::Duplicate { + typ: DuplicateType::Import, + first_def, + second_def, + }; + errors.push((err.into(), root_file_id)); + } + } + } + Err((error, module_id)) => { + let current_def_map = context.def_maps.get(&crate_id).unwrap(); + let file_id = current_def_map.file_id(module_id); + let error = DefCollectorErrorKind::PathResolutionError(error); + errors.push((error.into(), file_id)); } } } @@ -460,75 +457,95 @@ fn type_check_functions( } // TODO(vitkov): Move this out of here and into type_check +#[allow(clippy::too_many_arguments)] pub(crate) fn check_methods_signatures( resolver: &mut Resolver, impl_methods: &Vec<(FileId, FuncId)>, trait_id: TraitId, + trait_name_span: Span, + // These are the generics on the trait itself from the impl. + // E.g. in `impl Foo for Bar`, this is `vec![A, B]`. 
+ trait_generics: Vec, trait_impl_generic_count: usize, + file_id: FileId, errors: &mut Vec<(CompilationError, FileId)>, ) { let self_type = resolver.get_self_type().expect("trait impl must have a Self type").clone(); + let trait_generics = vecmap(trait_generics, |typ| resolver.resolve_type(typ)); // Temporarily bind the trait's Self type to self_type so we can type check let the_trait = resolver.interner.get_trait_mut(trait_id); the_trait.self_type_typevar.bind(self_type); + if trait_generics.len() != the_trait.generics.len() { + let error = DefCollectorErrorKind::MismatchGenericCount { + actual_generic_count: trait_generics.len(), + expected_generic_count: the_trait.generics.len(), + // Preferring to use 'here' over a more precise term like 'this reference' + // to try to make the error easier to understand for newer users. + location: "here it", + origin: the_trait.name.to_string(), + span: trait_name_span, + }; + errors.push((error.into(), file_id)); + } + + // We also need to bind the traits generics to the trait's generics on the impl + for (generic, binding) in the_trait.generics.iter().zip(trait_generics) { + generic.bind(binding); + } + // Temporarily take the trait's methods so we can use both them and a mutable reference // to the interner within the loop. let trait_methods = std::mem::take(&mut the_trait.methods); for (file_id, func_id) in impl_methods { - let impl_method = resolver.interner.function_meta(func_id); let func_name = resolver.interner.function_name(func_id).to_owned(); - let mut typecheck_errors = Vec::new(); - // This is None in the case where the impl block has a method that's not part of the trait. // If that's the case, a `MethodNotInTrait` error has already been thrown, and we can ignore // the impl method, since there's nothing in the trait to match its signature against. 
if let Some(trait_method) = trait_methods.iter().find(|method| method.name.0.contents == func_name) { - let impl_function_type = impl_method.typ.instantiate(resolver.interner); + let impl_method = resolver.interner.function_meta(func_id); let impl_method_generic_count = impl_method.typ.generic_count() - trait_impl_generic_count; // We subtract 1 here to account for the implicit generic `Self` type that is on all // traits (and thus trait methods) but is not required (or allowed) for users to specify. - let trait_method_generic_count = trait_method.generics().len() - 1; + let the_trait = resolver.interner.get_trait(trait_id); + let trait_method_generic_count = + trait_method.generics().len() - 1 - the_trait.generics.len(); if impl_method_generic_count != trait_method_generic_count { - let error = DefCollectorErrorKind::MismatchTraitImplementationNumGenerics { - impl_method_generic_count, - trait_method_generic_count, - trait_name: resolver.interner.get_trait(trait_id).name.to_string(), - method_name: func_name.to_string(), + let trait_name = resolver.interner.get_trait(trait_id).name.clone(); + + let error = DefCollectorErrorKind::MismatchGenericCount { + actual_generic_count: impl_method_generic_count, + expected_generic_count: trait_method_generic_count, + origin: format!("{}::{}", trait_name, func_name), + location: "this method", span: impl_method.location.span, }; errors.push((error.into(), *file_id)); } - if let Type::Function(impl_params, _, _) = impl_function_type.0 { - if trait_method.arguments().len() == impl_params.len() { - // Check the parameters of the impl method against the parameters of the trait method - let args = trait_method.arguments().iter(); - let args_and_params = args.zip(&impl_params).zip(&impl_method.parameters.0); - - for (parameter_index, ((expected, actual), (hir_pattern, _, _))) in - args_and_params.enumerate() - { - expected.unify(actual, &mut typecheck_errors, || { - TypeCheckError::TraitMethodParameterTypeMismatch { - method_name: 
func_name.to_string(), - expected_typ: expected.to_string(), - actual_typ: actual.to_string(), - parameter_span: hir_pattern.span(), - parameter_index: parameter_index + 1, - } - }); - } - } else { + // This instantiation is technically not needed. We could bind each generic in the + // trait function to the impl's corresponding generic but to do so we'd have to rely + // on the trait function's generics being first in the generic list, since the same + // list also contains the generic `Self` variable, and any generics on the trait itself. + // + // Instantiating the impl method's generics here instead is a bit less precise but + // doesn't rely on any orderings that may be changed. + let impl_function_type = impl_method.typ.instantiate(resolver.interner).0; + + let mut bindings = TypeBindings::new(); + let mut typecheck_errors = Vec::new(); + + if let Type::Function(impl_params, impl_return, _) = impl_function_type.as_monotype() { + if trait_method.arguments().len() != impl_params.len() { let error = DefCollectorErrorKind::MismatchTraitImplementationNumParameters { actual_num_parameters: impl_method.parameters.0.len(), expected_num_parameters: trait_method.arguments().len(), @@ -538,27 +555,51 @@ pub(crate) fn check_methods_signatures( }; errors.push((error.into(), *file_id)); } - } - // Check that impl method return type matches trait return type: - let resolved_return_type = - resolver.resolve_type(impl_method.return_type.get_type().into_owned()); + // Check the parameters of the impl method against the parameters of the trait method + let args = trait_method.arguments().iter(); + let args_and_params = args.zip(impl_params).zip(&impl_method.parameters.0); - // TODO: This is not right since it may bind generic return types - trait_method.return_type().unify(&resolved_return_type, &mut typecheck_errors, || { - let ret_type_span = impl_method.return_type.get_type().span; - let expr_span = ret_type_span.expect("return type must always have a span"); + for 
(parameter_index, ((expected, actual), (hir_pattern, _, _))) in + args_and_params.enumerate() + { + if expected.try_unify(actual, &mut bindings).is_err() { + typecheck_errors.push(TypeCheckError::TraitMethodParameterTypeMismatch { + method_name: func_name.to_string(), + expected_typ: expected.to_string(), + actual_typ: actual.to_string(), + parameter_span: hir_pattern.span(), + parameter_index: parameter_index + 1, + }); + } + } - let expected_typ = trait_method.return_type().to_string(); - let expr_typ = impl_method.return_type().to_string(); - TypeCheckError::TypeMismatch { expr_typ, expected_typ, expr_span } - }); + if trait_method.return_type().try_unify(impl_return, &mut bindings).is_err() { + let impl_method = resolver.interner.function_meta(func_id); + let ret_type_span = impl_method.return_type.get_type().span; + let expr_span = ret_type_span.expect("return type must always have a span"); + + let expected_typ = trait_method.return_type().to_string(); + let expr_typ = impl_method.return_type().to_string(); + let error = TypeCheckError::TypeMismatch { expr_typ, expected_typ, expr_span }; + typecheck_errors.push(error); + } + } else { + unreachable!( + "impl_function_type is not a function type, it is: {impl_function_type}" + ); + } errors.extend(typecheck_errors.iter().cloned().map(|e| (e.into(), *file_id))); } } + // Now unbind `Self` and the trait's generics let the_trait = resolver.interner.get_trait_mut(trait_id); the_trait.set_methods(trait_methods); the_trait.self_type_typevar.unbind(the_trait.self_type_typevar_id); + + for generic in &the_trait.generics { + generic.unbind(generic.id()); + } } diff --git a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs index 889574a9299..3cd60c33b8b 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/dc_mod.rs @@ -1,7 +1,7 @@ -use std::{collections::HashMap, vec}; +use 
std::{collections::HashMap, path::Path, vec}; use acvm::acir::acir_field::FieldOptions; -use fm::FileId; +use fm::{FileId, FileManager, FILE_EXTENSION}; use noirc_errors::Location; use crate::{ @@ -72,7 +72,7 @@ pub fn collect_defs( errors.extend(collector.collect_functions(context, ast.functions, crate_id)); - errors.extend(collector.collect_trait_impls(context, ast.trait_impls, crate_id)); + collector.collect_trait_impls(context, ast.trait_impls, crate_id); collector.collect_impls(context, ast.impls, crate_id); @@ -144,7 +144,7 @@ impl<'a> ModCollector<'a> { context: &mut Context, impls: Vec, krate: CrateId, - ) -> Vec<(CompilationError, fm::FileId)> { + ) { for trait_impl in impls { let trait_name = trait_impl.trait_name.clone(); @@ -168,11 +168,11 @@ impl<'a> ModCollector<'a> { generics: trait_impl.impl_generics, where_clause: trait_impl.where_clause, trait_id: None, // will be filled later + trait_generics: trait_impl.trait_generics, }; self.def_collector.collected_traits_impls.push(unresolved_trait_impl); } - vec![] } fn collect_trait_impl_function_overrides( @@ -524,7 +524,7 @@ impl<'a> ModCollector<'a> { ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; let child_file_id = - match context.file_manager.find_module(self.file_id, &mod_name.0.contents) { + match find_module(&context.file_manager, self.file_id, &mod_name.0.contents) { Ok(child_file_id) => child_file_id, Err(expected_path) => { let mod_name = mod_name.clone(); @@ -555,7 +555,8 @@ impl<'a> ModCollector<'a> { context.visited_files.insert(child_file_id, location); // Parse the AST for the module we just found and then recursively look for it's defs - let (ast, parsing_errors) = context.parse_file(child_file_id, crate_id); + let (ast, parsing_errors) = context.parsed_file_results(child_file_id); + let ast = ast.into_sorted(); errors.extend( parsing_errors.iter().map(|e| (e.clone().into(), child_file_id)).collect::>(), @@ -627,3 +628,119 @@ impl<'a> 
ModCollector<'a> { Ok(LocalModuleId(module_id)) } } + +fn find_module( + file_manager: &FileManager, + anchor: FileId, + mod_name: &str, +) -> Result { + let anchor_path = file_manager + .path(anchor) + .expect("File must exist in file manager in order for us to be resolving its imports.") + .with_extension(""); + let anchor_dir = anchor_path.parent().unwrap(); + + // if `anchor` is a `main.nr`, `lib.nr`, `mod.nr` or `{mod_name}.nr`, we check siblings of + // the anchor at `base/mod_name.nr`. + let candidate = if should_check_siblings_for_module(&anchor_path, anchor_dir) { + anchor_dir.join(format!("{mod_name}.{FILE_EXTENSION}")) + } else { + // Otherwise, we check for children of the anchor at `base/anchor/mod_name.nr` + anchor_path.join(format!("{mod_name}.{FILE_EXTENSION}")) + }; + + file_manager + .name_to_id(candidate.clone()) + .ok_or_else(|| candidate.as_os_str().to_string_lossy().to_string()) +} + +/// Returns true if a module's child modules are expected to be in the same directory. +/// Returns false if they are expected to be in a subdirectory matching the name of the module. +fn should_check_siblings_for_module(module_path: &Path, parent_path: &Path) -> bool { + if let Some(filename) = module_path.file_stem() { + // This check also means a `main.nr` or `lib.nr` file outside of the crate root would + // check its same directory for child modules instead of a subdirectory. Should we prohibit + // `main.nr` and `lib.nr` files outside of the crate root? + filename == "main" + || filename == "lib" + || filename == "mod" + || Some(filename) == parent_path.file_stem() + } else { + // If there's no filename, we arbitrarily return true. + // Alternatively, we could panic, but this is left to a different step where we + // ideally have some source location to issue an error. 
+ true + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + #[test] + fn path_resolve_file_module() { + let dir = tempdir().unwrap(); + + let entry_file_name = Path::new("my_dummy_file.nr"); + create_dummy_file(&dir, entry_file_name); + + let mut fm = FileManager::new(dir.path()); + + let file_id = fm.add_file_with_source(entry_file_name, "fn foo() {}".to_string()).unwrap(); + + let dep_file_name = Path::new("foo.nr"); + create_dummy_file(&dir, dep_file_name); + find_module(&fm, file_id, "foo").unwrap_err(); + } + + #[test] + fn path_resolve_sub_module() { + let dir = tempdir().unwrap(); + let mut fm = FileManager::new(dir.path()); + + // Create a lib.nr file at the root. + // we now have dir/lib.nr + let lib_nr_path = create_dummy_file(&dir, Path::new("lib.nr")); + let file_id = fm + .add_file_with_source(lib_nr_path.as_path(), "fn foo() {}".to_string()) + .expect("could not add file to file manager and obtain a FileId"); + + // Create a sub directory + // we now have: + // - dir/lib.nr + // - dir/sub_dir + let sub_dir = TempDir::new_in(&dir).unwrap(); + let sub_dir_name = sub_dir.path().file_name().unwrap().to_str().unwrap(); + + // Add foo.nr to the subdirectory + // we now have: + // - dir/lib.nr + // - dir/sub_dir/foo.nr + let foo_nr_path = create_dummy_file(&sub_dir, Path::new("foo.nr")); + fm.add_file_with_source(foo_nr_path.as_path(), "fn foo() {}".to_string()); + + // Add a parent module for the sub_dir + // we now have: + // - dir/lib.nr + // - dir/sub_dir.nr + // - dir/sub_dir/foo.nr + let sub_dir_nr_path = create_dummy_file(&dir, Path::new(&format!("{sub_dir_name}.nr"))); + fm.add_file_with_source(sub_dir_nr_path.as_path(), "fn foo() {}".to_string()); + + //
First check for the sub_dir.nr file and add it to the FileManager + let sub_dir_file_id = find_module(&fm, file_id, sub_dir_name).unwrap(); + + // Now check for files in its subdirectory + find_module(&fm, sub_dir_file_id, "foo").unwrap(); + } +} diff --git a/compiler/noirc_frontend/src/hir/def_collector/errors.rs b/compiler/noirc_frontend/src/hir/def_collector/errors.rs index 2b91c4b36c5..de45be48c4e 100644 --- a/compiler/noirc_frontend/src/hir/def_collector/errors.rs +++ b/compiler/noirc_frontend/src/hir/def_collector/errors.rs @@ -49,12 +49,12 @@ pub enum DefCollectorErrorKind { method_name: String, span: Span, }, - #[error("Mismatched number of generics in impl method")] - MismatchTraitImplementationNumGenerics { - impl_method_generic_count: usize, - trait_method_generic_count: usize, - trait_name: String, - method_name: String, + #[error("Mismatched number of generics in {location}")] + MismatchGenericCount { + actual_generic_count: usize, + expected_generic_count: usize, + location: &'static str, + origin: String, span: Span, }, #[error("Method is not defined in trait")] @@ -188,16 +188,16 @@ impl From for Diagnostic { "`{trait_name}::{method_name}` expects {expected_num_parameters} parameter{plural}, but this method has {actual_num_parameters}"); Diagnostic::simple_error(primary_message, "".to_string(), span) } - DefCollectorErrorKind::MismatchTraitImplementationNumGenerics { - impl_method_generic_count, - trait_method_generic_count, - trait_name, - method_name, + DefCollectorErrorKind::MismatchGenericCount { + actual_generic_count, + expected_generic_count, + location, + origin, span, } => { - let plural = if trait_method_generic_count == 1 { "" } else { "s" }; + let plural = if expected_generic_count == 1 { "" } else { "s" }; let primary_message = format!( - "`{trait_name}::{method_name}` expects {trait_method_generic_count} generic{plural}, but this method has {impl_method_generic_count}"); + "`{origin}` expects {expected_generic_count} generic{plural}, 
but {location} has {actual_generic_count}"); Diagnostic::simple_error(primary_message, "".to_string(), span) } DefCollectorErrorKind::MethodNotInTrait { trait_name, impl_method } => { diff --git a/compiler/noirc_frontend/src/hir/def_map/mod.rs b/compiler/noirc_frontend/src/hir/def_map/mod.rs index 69b4b858240..5b345816492 100644 --- a/compiler/noirc_frontend/src/hir/def_map/mod.rs +++ b/compiler/noirc_frontend/src/hir/def_map/mod.rs @@ -64,6 +64,7 @@ pub struct CrateDefMap { pub(crate) krate: CrateId, + /// Maps an external dependency's name to its root module id. pub(crate) extern_prelude: BTreeMap, } @@ -85,18 +86,20 @@ impl CrateDefMap { } // First parse the root file. - let root_file_id = context.get_root_id(crate_id); - let (mut ast, parsing_errors) = context.parse_file(root_file_id, crate_id); + let root_file_id = context.crate_graph[crate_id].root_file_id; + let (ast, parsing_errors) = context.parsed_file_results(root_file_id); + let mut ast = ast.into_sorted(); for macro_processor in ¯o_processors { - ast = match macro_processor.process_untyped_ast(ast, &crate_id, context) { - Ok(ast) => ast, + match macro_processor.process_untyped_ast(ast.clone(), &crate_id, context) { + Ok(processed_ast) => { + ast = processed_ast; + } Err((error, file_id)) => { let def_error = DefCollectorErrorKind::MacroError(error); errors.push((def_error.into(), file_id)); - return errors; } - }; + } } // Allocate a default Module for the root, giving it a ModuleId @@ -172,6 +175,29 @@ impl CrateDefMap { }) }) } + + /// Go through all modules in this crate, and find all functions in + /// each module with the #[export] attribute + pub fn get_all_exported_functions<'a>( + &'a self, + interner: &'a NodeInterner, + ) -> impl Iterator + 'a { + self.modules.iter().flat_map(|(_, module)| { + module.value_definitions().filter_map(|id| { + if let Some(func_id) = id.as_function() { + let attributes = interner.function_attributes(&func_id); + if 
attributes.secondary.contains(&SecondaryAttribute::Export) { + Some(func_id) + } else { + None + } + } else { + None + } + }) + }) + } + /// Go through all modules in this crate, find all `contract ... { ... }` declarations, /// and collect them all into a Vec. pub fn get_all_contracts(&self, interner: &NodeInterner) -> Vec { @@ -269,8 +295,8 @@ pub struct Contract { /// Given a FileId, fetch the File, from the FileManager and parse it's content. pub fn parse_file(fm: &FileManager, file_id: FileId) -> (ParsedModule, Vec) { - let file = fm.fetch_file(file_id); - parse_program(file.source()) + let file_source = fm.fetch_file(file_id).expect("File does not exist"); + parse_program(file_source) } impl std::ops::Index for CrateDefMap { diff --git a/compiler/noirc_frontend/src/hir/mod.rs b/compiler/noirc_frontend/src/hir/mod.rs index d38e3f0f844..2904a4df46b 100644 --- a/compiler/noirc_frontend/src/hir/mod.rs +++ b/compiler/noirc_frontend/src/hir/mod.rs @@ -8,33 +8,40 @@ use crate::debug::DebugState; use crate::graph::{CrateGraph, CrateId}; use crate::hir_def::function::FuncMeta; use crate::node_interner::{FuncId, NodeInterner, StructId}; -use crate::parser::{ParserError, SortedModule}; -use def_map::{parse_file, Contract, CrateDefMap}; +use crate::parser::ParserError; +use crate::ParsedModule; +use def_map::{Contract, CrateDefMap}; use fm::{FileId, FileManager}; use noirc_errors::Location; +use std::borrow::Cow; use std::collections::{BTreeMap, HashMap}; use self::def_map::TestFunction; +pub type ParsedFiles = HashMap)>; + /// Helper object which groups together several useful context objects used /// during name resolution. Once name resolution is finished, only the /// def_interner is required for type inference and monomorphization. 
-pub struct Context { +pub struct Context<'file_manager, 'parsed_files> { pub def_interner: NodeInterner, pub crate_graph: CrateGraph, pub(crate) def_maps: BTreeMap, - pub file_manager: FileManager, - pub root_crate_id: CrateId, + // In the WASM context, we take ownership of the file manager, + // which is why this needs to be a Cow. In all use-cases, the file manager + // is read-only however, once it has been passed to the Context. + pub file_manager: Cow<'file_manager, FileManager>, + pub debug_state: DebugState, - pub instrument_debug: bool, /// A map of each file that already has been visited from a prior `mod foo;` declaration. /// This is used to issue an error if a second `mod foo;` is declared to the same file. pub visited_files: BTreeMap, - /// Maps a given (contract) module id to the next available storage slot - /// for that contract. - pub storage_slots: HashMap, + // A map of all parsed files. + // Same as the file manager, we take ownership of the parsed files in the WASM context. + // Parsed files is also read only. 
+ pub parsed_files: Cow<'parsed_files, ParsedFiles>, } pub type StorageSlot = u32; @@ -46,21 +53,38 @@ pub enum FunctionNameMatch<'a> { Contains(&'a str), } -impl Context { - pub fn new(file_manager: FileManager, crate_graph: CrateGraph) -> Context { +impl Context<'_, '_> { + pub fn new(file_manager: FileManager, parsed_files: ParsedFiles) -> Context<'static, 'static> { + Context { + def_interner: NodeInterner::default(), + def_maps: BTreeMap::new(), + visited_files: BTreeMap::new(), + crate_graph: CrateGraph::default(), + file_manager: Cow::Owned(file_manager), + debug_state: DebugState::default(), + parsed_files: Cow::Owned(parsed_files), + } + } + + pub fn from_ref_file_manager<'file_manager, 'parsed_files>( + file_manager: &'file_manager FileManager, + parsed_files: &'parsed_files ParsedFiles, + ) -> Context<'file_manager, 'parsed_files> { Context { def_interner: NodeInterner::default(), def_maps: BTreeMap::new(), visited_files: BTreeMap::new(), - crate_graph, - file_manager, - root_crate_id: CrateId::Dummy, + crate_graph: CrateGraph::default(), + file_manager: Cow::Borrowed(file_manager), debug_state: DebugState::default(), - storage_slots: HashMap::new(), - instrument_debug: false, + parsed_files: Cow::Borrowed(parsed_files), } } + pub fn parsed_file_results(&self, file_id: fm::FileId) -> (ParsedModule, Vec) { + self.parsed_files.get(&file_id).expect("noir file wasn't parsed").clone() + } + /// Returns the CrateDefMap for a given CrateId. /// It is perfectly valid for the compiler to look /// up a CrateDefMap and it is not available. @@ -153,7 +177,7 @@ impl Context { None } - pub fn function_meta(&self, func_id: &FuncId) -> FuncMeta { + pub fn function_meta(&self, func_id: &FuncId) -> &FuncMeta { self.def_interner.function_meta(func_id) } @@ -195,12 +219,17 @@ impl Context { .collect() } - /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId]. - /// Returns [None] when definition is not found. 
- pub fn get_definition_location_from(&self, location: Location) -> Option { + pub fn get_all_exported_functions_in_crate(&self, crate_id: &CrateId) -> Vec<(String, FuncId)> { let interner = &self.def_interner; + let def_map = self.def_map(crate_id).expect("The local crate should be analyzed already"); - interner.find_location_index(location).and_then(|index| interner.resolve_location(index)) + def_map + .get_all_exported_functions(interner) + .map(|function_id| { + let function_name = self.function_name(&function_id).to_owned(); + (function_name, function_id) + }) + .collect() } /// Return a Vec of all `contract` declarations in the source code and the functions they contain @@ -214,21 +243,6 @@ impl Context { module_id.module(&self.def_maps) } - /// Given a FileId, fetch the File, from the FileManager and parse its content, - /// applying sorting and debug transforms if debug mode is enabled. - pub fn parse_file( - &mut self, - file_id: FileId, - crate_id: CrateId, - ) -> (SortedModule, Vec) { - let (mut ast, parsing_errors) = parse_file(&self.file_manager, file_id); - - if self.instrument_debug && crate_id == self.root_crate_id { - self.debug_state.insert_symbols(&mut ast); - } - (ast.into_sorted(), parsing_errors) - } - pub fn get_root_id(&self, crate_id: CrateId) -> FileId { self.crate_graph[crate_id].root_file_id } diff --git a/compiler/noirc_frontend/src/hir/resolution/errors.rs b/compiler/noirc_frontend/src/hir/resolution/errors.rs index c2f787313c6..390807afd17 100644 --- a/compiler/noirc_frontend/src/hir/resolution/errors.rs +++ b/compiler/noirc_frontend/src/hir/resolution/errors.rs @@ -60,8 +60,8 @@ pub enum ResolverError { NonStructWithGenerics { span: Span }, #[error("Cannot apply generics on Self type")] GenericsOnSelfType { span: Span }, - #[error("Incorrect amount of arguments to generic type constructor")] - IncorrectGenericCount { span: Span, struct_type: String, actual: usize, expected: usize }, + #[error("Incorrect amount of arguments to 
{item_name}")] + IncorrectGenericCount { span: Span, item_name: String, actual: usize, expected: usize }, #[error("{0}")] ParserError(Box), #[error("Function is not defined in a contract yet sets its contract visibility")] @@ -82,6 +82,8 @@ pub enum ResolverError { NonCrateFunctionCalled { name: String, span: Span }, #[error("Only sized types may be used in the entry point to a program")] InvalidTypeForEntryPoint { span: Span }, + #[error("Nested slices are not supported")] + NestedSlices { span: Span }, } impl ResolverError { @@ -259,12 +261,12 @@ impl From for Diagnostic { "Use an explicit type name or apply the generics at the start of the impl instead".into(), span, ), - ResolverError::IncorrectGenericCount { span, struct_type, actual, expected } => { + ResolverError::IncorrectGenericCount { span, item_name, actual, expected } => { let expected_plural = if expected == 1 { "" } else { "s" }; let actual_plural = if actual == 1 { "is" } else { "are" }; Diagnostic::simple_error( - format!("The struct type {struct_type} has {expected} generic{expected_plural} but {actual} {actual_plural} given here"), + format!("`{item_name}` has {expected} generic argument{expected_plural} but {actual} {actual_plural} given here"), "Incorrect number of generic arguments".into(), span, ) @@ -304,6 +306,11 @@ impl From for Diagnostic { ResolverError::InvalidTypeForEntryPoint { span } => Diagnostic::simple_error( "Only sized types may be used in the entry point to a program".to_string(), "Slices, references, or any type containing them may not be used in main or a contract function".to_string(), span), + ResolverError::NestedSlices { span } => Diagnostic::simple_error( + "Nested slices are not supported".into(), + "Try to use a constant sized array instead".into(), + span, + ), } } } diff --git a/compiler/noirc_frontend/src/hir/resolution/import.rs b/compiler/noirc_frontend/src/hir/resolution/import.rs index 41fdac746bd..e6ac33053a0 100644 --- 
a/compiler/noirc_frontend/src/hir/resolution/import.rs +++ b/compiler/noirc_frontend/src/hir/resolution/import.rs @@ -1,4 +1,3 @@ -use iter_extended::partition_results; use noirc_errors::{CustomDiagnostic, Span}; use crate::graph::CrateId; @@ -51,29 +50,27 @@ impl From for CustomDiagnostic { } } -pub fn resolve_imports( +pub fn resolve_import( crate_id: CrateId, - imports_to_resolve: Vec, + import_directive: ImportDirective, def_maps: &BTreeMap, -) -> (Vec, Vec<(PathResolutionError, LocalModuleId)>) { +) -> Result { let def_map = &def_maps[&crate_id]; - partition_results(imports_to_resolve, |import_directive| { - let allow_contracts = - allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); - - let module_scope = import_directive.module_id; - let resolved_namespace = - resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) - .map_err(|error| (error, module_scope))?; - - let name = resolve_path_name(&import_directive); - Ok(ResolvedImport { - name, - resolved_namespace, - module_scope, - is_prelude: import_directive.is_prelude, - }) + let allow_contracts = + allow_referencing_contracts(def_maps, crate_id, import_directive.module_id); + + let module_scope = import_directive.module_id; + let resolved_namespace = + resolve_path_to_ns(&import_directive, def_map, def_maps, allow_contracts) + .map_err(|error| (error, module_scope))?; + + let name = resolve_path_name(&import_directive); + Ok(ResolvedImport { + name, + resolved_namespace, + module_scope, + is_prelude: import_directive.is_prelude, }) } diff --git a/compiler/noirc_frontend/src/hir/resolution/resolver.rs b/compiler/noirc_frontend/src/hir/resolution/resolver.rs index 9cd28d80784..492e96a4715 100644 --- a/compiler/noirc_frontend/src/hir/resolution/resolver.rs +++ b/compiler/noirc_frontend/src/hir/resolution/resolver.rs @@ -15,7 +15,7 @@ use crate::hir_def::expr::{ HirArrayLiteral, HirBinaryOp, HirBlockExpression, HirCallExpression, HirCapturedVar, HirCastExpression, 
HirConstructorExpression, HirExpression, HirIdent, HirIfExpression, HirIndexExpression, HirInfixExpression, HirLambda, HirLiteral, HirMemberAccess, - HirMethodCallExpression, HirPrefixExpression, + HirMethodCallExpression, HirPrefixExpression, ImplKind, }; use crate::hir_def::traits::{Trait, TraitConstraint}; @@ -29,7 +29,7 @@ use crate::hir::def_map::{LocalModuleId, ModuleDefId, TryFromModuleDefId, MAIN_F use crate::hir_def::stmt::{HirAssignStatement, HirForStatement, HirLValue, HirPattern}; use crate::node_interner::{ DefinitionId, DefinitionKind, ExprId, FuncId, NodeInterner, StmtId, StructId, TraitId, - TraitImplId, TraitImplKind, + TraitImplId, TraitMethodId, }; use crate::{ hir::{def_map::CrateDefMap, resolution::path_resolver::PathResolver}, @@ -39,9 +39,9 @@ use crate::{ use crate::{ ArrayLiteral, ContractFunctionType, Distinctness, ForRange, FunctionDefinition, FunctionReturnType, FunctionVisibility, Generics, LValue, NoirStruct, NoirTypeAlias, Param, - Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeBinding, TypeVariable, - UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, UnresolvedTypeData, - UnresolvedTypeExpression, Visibility, ERROR_IDENT, + Path, PathKind, Pattern, Shared, StructType, Type, TypeAliasType, TypeVariable, + TypeVariableKind, UnaryOp, UnresolvedGenerics, UnresolvedTraitConstraint, UnresolvedType, + UnresolvedTypeData, UnresolvedTypeExpression, Visibility, ERROR_IDENT, }; use fm::FileId; use iter_extended::vecmap; @@ -95,7 +95,7 @@ pub struct Resolver<'a> { /// True if the current module is a contract. /// This is usually determined by self.path_resolver.module_id(), but it can - /// be overriden for impls. Impls are an odd case since the methods within resolve + /// be overridden for impls. Impls are an odd case since the methods within resolve /// as if they're in the parent module, but should be placed in a child module. 
/// Since they should be within a child module, in_contract is manually set to false /// for these so we can still resolve them in the parent module without them being in a contract. @@ -267,7 +267,7 @@ impl<'a> Resolver<'a> { let has_underscore_prefix = variable_name.starts_with('_'); // XXX: This is used for development mode, and will be removed metadata.warn_if_unused && metadata.num_times_used == 0 && !has_underscore_prefix }); - unused_vars.extend(unused_variables.map(|(_, meta)| meta.ident)); + unused_vars.extend(unused_variables.map(|(_, meta)| meta.ident.clone())); } /// Run the given function in a new scope. @@ -304,8 +304,9 @@ impl<'a> Resolver<'a> { let location = Location::new(name.span(), self.file); let id = self.interner.push_definition(name.0.contents.clone(), mutable, definition, location); - let ident = HirIdent { location, id }; - let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused }; + let ident = HirIdent::non_trait_method(id, location); + let resolver_meta = + ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused }; let scope = self.scopes.get_mut_scope(); let old_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); @@ -325,8 +326,6 @@ impl<'a> Resolver<'a> { fn add_global_variable_decl(&mut self, name: Ident, definition: DefinitionKind) -> HirIdent { let scope = self.scopes.get_mut_scope(); - let ident; - let resolver_meta; // This check is necessary to maintain the same definition ids in the interner. Currently, each function uses a new resolver that has its own ScopeForest and thus global scope. // We must first check whether an existing definition ID has been inserted as otherwise there will be multiple definitions for the same global statement. 
@@ -341,17 +340,20 @@ impl<'a> Resolver<'a> { } } - if let Some(id) = stmt_id { + let (ident, resolver_meta) = if let Some(id) = stmt_id { let hir_let_stmt = self.interner.let_statement(&id); - ident = hir_let_stmt.ident(); - resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; + let ident = hir_let_stmt.ident(); + let resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; + (hir_let_stmt.ident(), resolver_meta) } else { let location = Location::new(name.span(), self.file); let id = self.interner.push_definition(name.0.contents.clone(), false, definition, location); - ident = HirIdent { location, id }; - resolver_meta = ResolverMeta { num_times_used: 0, ident, warn_if_unused: true }; - } + let ident = HirIdent::non_trait_method(id, location); + let resolver_meta = + ResolverMeta { num_times_used: 0, ident: ident.clone(), warn_if_unused: true }; + (ident, resolver_meta) + }; let old_global_value = scope.add_key_value(name.0.contents.clone(), resolver_meta); if let Some(old_global_value) = old_global_value { @@ -376,7 +378,7 @@ impl<'a> Resolver<'a> { self.push_err(error); let id = DefinitionId::dummy_id(); let location = Location::new(name.span(), self.file); - (HirIdent { location, id }, 0) + (HirIdent::non_trait_method(id, location), 0) }) } @@ -389,7 +391,7 @@ impl<'a> Resolver<'a> { if let Some((variable_found, scope)) = variable { variable_found.num_times_used += 1; let id = variable_found.ident.id; - Ok((HirIdent { location, id }, scope)) + Ok((HirIdent::non_trait_method(id, location), scope)) } else { Err(ResolverError::VariableNotDeclared { name: name.0.contents.clone(), @@ -419,8 +421,27 @@ impl<'a> Resolver<'a> { constraint: UnresolvedTraitConstraint, ) -> Option { let typ = self.resolve_type(constraint.typ); - let trait_id = self.lookup_trait_or_error(constraint.trait_bound.trait_path)?.id; - Some(TraitConstraint { typ, trait_id }) + let trait_generics = + vecmap(constraint.trait_bound.trait_generics, 
|typ| self.resolve_type(typ)); + + let span = constraint.trait_bound.trait_path.span(); + let the_trait = self.lookup_trait_or_error(constraint.trait_bound.trait_path)?; + let trait_id = the_trait.id; + + let expected_generics = the_trait.generics.len(); + let actual_generics = trait_generics.len(); + + if actual_generics != expected_generics { + let item_name = the_trait.name.to_string(); + self.push_err(ResolverError::IncorrectGenericCount { + span, + item_name, + actual: actual_generics, + expected: expected_generics, + }); + } + + Some(TraitConstraint { typ, trait_id, trait_generics }) } /// Translates an UnresolvedType into a Type and appends any @@ -428,7 +449,7 @@ impl<'a> Resolver<'a> { fn resolve_type_inner(&mut self, typ: UnresolvedType, new_variables: &mut Generics) -> Type { use UnresolvedTypeData::*; - match typ.typ { + let resolved_type = match typ.typ { FieldElement => Type::FieldElement, Array(size, elem) => { let elem = Box::new(self.resolve_type_inner(*elem, new_variables)); @@ -489,7 +510,18 @@ impl<'a> Resolver<'a> { Type::MutableReference(Box::new(self.resolve_type_inner(*element, new_variables))) } Parenthesized(typ) => self.resolve_type_inner(*typ, new_variables), + }; + + if let Type::Struct(_, _) = resolved_type { + if let Some(unresolved_span) = typ.span { + // Record the location of the type reference + self.interner.push_type_ref_location( + resolved_type.clone(), + Location::new(unresolved_span, self.file), + ); + } } + resolved_type } fn find_generic(&self, target_name: &str) -> Option<&(Rc, TypeVariable, Span)> { @@ -537,6 +569,10 @@ impl<'a> Resolver<'a> { let result = self.interner.get_type_alias(id).get_type(&args); + // Collecting Type Alias references [Location]s to be used by LSP in order + // to resolve the definition of the type alias + self.interner.add_type_alias_ref(id, Location::new(span, self.file)); + // Because there is no ordering to when type aliases (and other globals) are resolved, // it is possible for one to refer 
to an Error type and issue no error if it is set // equal to another type alias. Fixing this fully requires an analysis to create a DFG @@ -564,11 +600,13 @@ impl<'a> Resolver<'a> { fn resolve_trait_as_type( &mut self, path: Path, - _args: Vec, - _new_variables: &mut Generics, + args: Vec, + new_variables: &mut Generics, ) -> Type { + let args = vecmap(args, |arg| self.resolve_type_inner(arg, new_variables)); + if let Some(t) = self.lookup_trait_or_error(path) { - Type::TraitAsType(t.id, Rc::new(t.name.to_string())) + Type::TraitAsType(t.id, Rc::new(t.name.to_string()), args) } else { Type::Error } @@ -584,7 +622,7 @@ impl<'a> Resolver<'a> { if args.len() != expected_count { self.errors.push(ResolverError::IncorrectGenericCount { span, - struct_type: type_name(), + item_name: type_name(), actual: args.len(), expected: expected_count, }); @@ -620,7 +658,7 @@ impl<'a> Resolver<'a> { None => { let id = self.interner.next_type_variable_id(); let typevar = TypeVariable::unbound(id); - new_variables.push((id, typevar.clone())); + new_variables.push(typevar.clone()); // 'Named'Generic is a bit of a misnomer here, we want a type variable that // wont be bound over but this one has no name since we do not currently @@ -667,22 +705,28 @@ impl<'a> Resolver<'a> { Some(Ok(found)) => return found, // Try to look it up as a global, but still issue the first error if we fail Some(Err(error)) => match self.lookup_global(path) { - Ok(id) => return (HirIdent { location, id }, 0), + Ok(id) => return (HirIdent::non_trait_method(id, location), 0), Err(_) => error, }, None => match self.lookup_global(path) { - Ok(id) => return (HirIdent { location, id }, 0), + Ok(id) => return (HirIdent::non_trait_method(id, location), 0), Err(error) => error, }, }; self.push_err(error); let id = DefinitionId::dummy_id(); - (HirIdent { location, id }, 0) + (HirIdent::non_trait_method(id, location), 0) } /// Translates an UnresolvedType to a Type pub fn resolve_type(&mut self, typ: UnresolvedType) -> Type 
{ - self.resolve_type_inner(typ, &mut vec![]) + let span = typ.span; + let resolved_type = self.resolve_type_inner(typ, &mut vec![]); + if resolved_type.is_nested_slice() { + self.errors.push(ResolverError::NestedSlices { span: span.unwrap() }); + } + + resolved_type } pub fn resolve_type_aliases( @@ -736,7 +780,6 @@ impl<'a> Resolver<'a> { let name = Rc::new(generic.0.contents.clone()); if let Some((_, _, first_span)) = self.find_generic(&name) { - let span = generic.0.span(); self.errors.push(ResolverError::DuplicateDefinition { name: generic.0.contents.clone(), first_span: *first_span, @@ -746,10 +789,36 @@ impl<'a> Resolver<'a> { self.generics.push((name, typevar.clone(), span)); } - (id, typevar) + typevar }) } + /// Add the given existing generics to scope. + /// This is useful for adding the same generics to many items. E.g. apply impl generics + /// to each function in the impl or trait generics to each item in the trait. + pub fn add_existing_generics(&mut self, names: &UnresolvedGenerics, generics: &Generics) { + assert_eq!(names.len(), generics.len()); + + for (name, typevar) in names.iter().zip(generics) { + self.add_existing_generic(&name.0.contents, name.0.span(), typevar.clone()); + } + } + + pub fn add_existing_generic(&mut self, name: &str, span: Span, typevar: TypeVariable) { + // Check for name collisions of this generic + let rc_name = Rc::new(name.to_owned()); + + if let Some((_, _, first_span)) = self.find_generic(&rc_name) { + self.errors.push(ResolverError::DuplicateDefinition { + name: name.to_owned(), + first_span: *first_span, + second_span: span, + }); + } else { + self.generics.push((rc_name, typevar, span)); + } + } + pub fn resolve_struct_fields( mut self, unresolved: NoirStruct, @@ -778,12 +847,13 @@ impl<'a> Resolver<'a> { /// there's a bunch of other places where trait constraints can pop up fn resolve_trait_constraints( &mut self, - where_clause: &Vec, + where_clause: &[UnresolvedTraitConstraint], ) -> Vec { - vecmap(where_clause, 
|constraint| TraitConstraint { - typ: self.resolve_type(constraint.typ.clone()), - trait_id: constraint.trait_bound.trait_id.unwrap_or_else(TraitId::dummy_id), - }) + where_clause + .iter() + .cloned() + .filter_map(|constraint| self.resolve_trait_constraint(constraint)) + .collect() } /// Extract metadata from a NoirFunction @@ -793,18 +863,11 @@ impl<'a> Resolver<'a> { fn extract_meta(&mut self, func: &NoirFunction, func_id: FuncId) -> FuncMeta { let location = Location::new(func.name_ident().span(), self.file); let id = self.interner.function_definition_id(func_id); - let name_ident = HirIdent { id, location }; + let name_ident = HirIdent::non_trait_method(id, location); let attributes = func.attributes().clone(); - let mut generics = - vecmap(self.generics.clone(), |(name, typevar, _)| match &*typevar.borrow() { - TypeBinding::Unbound(id) => (*id, typevar.clone()), - TypeBinding::Bound(binding) => { - unreachable!("Expected {} to be unbound, but it is bound to {}", name, binding) - } - }); - + let mut generics = vecmap(&self.generics, |(_, typevar, _)| typevar.clone()); let mut parameters = vec![]; let mut parameter_types = vec![]; @@ -1115,7 +1178,7 @@ impl<'a> Resolver<'a> { match lvalue { LValue::Ident(ident) => { let ident = self.find_variable_or_default(&ident); - self.resolve_local_variable(ident.0, ident.1); + self.resolve_local_variable(ident.0.clone(), ident.1); HirLValue::Ident(ident.0, Type::Error) } @@ -1201,9 +1264,10 @@ impl<'a> Resolver<'a> { .position(|capture| capture.ident.id == hir_ident.id); if pos.is_none() { - self.lambda_stack[lambda_index] - .captures - .push(HirCapturedVar { ident: hir_ident, transitive_capture_index }); + self.lambda_stack[lambda_index].captures.push(HirCapturedVar { + ident: hir_ident.clone(), + transitive_capture_index, + }); } if lambda_index + 1 < self.lambda_stack.len() { @@ -1250,12 +1314,13 @@ impl<'a> Resolver<'a> { Literal::Unit => HirLiteral::Unit, }), ExpressionKind::Variable(path) => { - if let 
Some((hir_expr, object_type)) = self.resolve_trait_generic_path(&path) { - let expr_id = self.interner.push_expr(hir_expr); - self.interner.push_expr_location(expr_id, expr.span, self.file); - self.interner - .select_impl_for_ident(expr_id, TraitImplKind::Assumed { object_type }); - return expr_id; + if let Some((method, constraint, assumed)) = self.resolve_trait_generic_path(&path) + { + HirExpression::Ident(HirIdent { + location: Location::new(expr.span, self.file), + id: self.interner.trait_method_id(method), + impl_kind: ImplKind::TraitMethod(method, constraint, assumed), + }) } else { // If the Path is being used as an Expression, then it is referring to a global from a separate module // Otherwise, then it is referring to an Identifier @@ -1290,7 +1355,7 @@ impl<'a> Resolver<'a> { } DefinitionKind::Local(_) => { // only local variables can be captured by closures. - self.resolve_local_variable(hir_ident, var_scope_index); + self.resolve_local_variable(hir_ident.clone(), var_scope_index); } } } @@ -1313,10 +1378,12 @@ impl<'a> Resolver<'a> { ExpressionKind::Infix(infix) => { let lhs = self.resolve_expression(infix.lhs); let rhs = self.resolve_expression(infix.rhs); + let trait_id = self.interner.get_operator_trait_method(infix.operator.contents); HirExpression::Infix(HirInfixExpression { lhs, operator: HirBinaryOp::new(infix.operator, self.file), + trait_method_id: trait_id, rhs, }) } @@ -1628,7 +1695,7 @@ impl<'a> Resolver<'a> { fn resolve_trait_static_method_by_self( &mut self, path: &Path, - ) -> Option<(HirExpression, Type)> { + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { let trait_id = self.trait_id?; if path.kind == PathKind::Plain && path.segments.len() == 2 { @@ -1638,15 +1705,23 @@ impl<'a> Resolver<'a> { if name == SELF_TYPE_NAME { let the_trait = self.interner.get_trait(trait_id); let method = the_trait.find_method(method.0.contents.as_str())?; - let self_type = self.self_type.clone()?; - return 
Some((HirExpression::TraitMethodReference(method), self_type)); + + let constraint = TraitConstraint { + typ: self.self_type.clone()?, + trait_generics: Type::from_generics(&the_trait.generics), + trait_id, + }; + return Some((method, constraint, false)); } } None } // this resolves TraitName::some_static_method - fn resolve_trait_static_method(&mut self, path: &Path) -> Option<(HirExpression, Type)> { + fn resolve_trait_static_method( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { if path.kind == PathKind::Plain && path.segments.len() == 2 { let method = &path.segments[1]; @@ -1656,17 +1731,27 @@ impl<'a> Resolver<'a> { let the_trait = self.interner.get_trait(trait_id); let method = the_trait.find_method(method.0.contents.as_str())?; - let self_type = Type::type_variable(the_trait.self_type_typevar_id); - return Some((HirExpression::TraitMethodReference(method), self_type)); + let constraint = TraitConstraint { + typ: Type::TypeVariable( + the_trait.self_type_typevar.clone(), + TypeVariableKind::Normal, + ), + trait_generics: Type::from_generics(&the_trait.generics), + trait_id, + }; + return Some((method, constraint, false)); } None } - // this resolves a static trait method T::trait_method by iterating over the where clause + // This resolves a static trait method T::trait_method by iterating over the where clause + // + // Returns the trait method, object type, and the trait generics. + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` fn resolve_trait_method_by_named_generic( &mut self, path: &Path, - ) -> Option<(HirExpression, Type)> { + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { if path.segments.len() != 2 { return None; } @@ -1687,8 +1772,14 @@ impl<'a> Resolver<'a> { if let Some(method) = the_trait.find_method(path.segments.last().unwrap().0.contents.as_str()) { - let self_type = self.resolve_type(typ.clone()); - return Some((HirExpression::TraitMethodReference(method), self_type)); + let constraint = TraitConstraint { + trait_id, + typ: self.resolve_type(typ.clone()), + trait_generics: vecmap(trait_bound.trait_generics, |typ| { + self.resolve_type(typ) + }), + }; + return Some((method, constraint, true)); } } } @@ -1696,7 +1787,14 @@ impl<'a> Resolver<'a> { None } - fn resolve_trait_generic_path(&mut self, path: &Path) -> Option<(HirExpression, Type)> { + // Try to resolve the given trait method path. + // + // Returns the trait method, object type, and the trait generics. + // E.g. 
`t.method()` with `where T: Foo` in scope will return `(Foo::method, T, vec![Bar])` + fn resolve_trait_generic_path( + &mut self, + path: &Path, + ) -> Option<(TraitMethodId, TraitConstraint, bool)> { self.resolve_trait_static_method_by_self(path) .or_else(|| self.resolve_trait_static_method(path)) .or_else(|| self.resolve_trait_method_by_named_generic(path)) @@ -1761,7 +1859,8 @@ impl<'a> Resolver<'a> { let variable = scope_tree.find(ident_name); if let Some((old_value, _)) = variable { old_value.num_times_used += 1; - let expr_id = self.interner.push_expr(HirExpression::Ident(old_value.ident)); + let ident = HirExpression::Ident(old_value.ident.clone()); + let expr_id = self.interner.push_expr(ident); self.interner.push_expr_location(expr_id, call_expr_span, self.file); fmt_str_idents.push(expr_id); } else if ident_name.parse::().is_ok() { diff --git a/compiler/noirc_frontend/src/hir/resolution/structs.rs b/compiler/noirc_frontend/src/hir/resolution/structs.rs index 72a7b736436..cf3e3436c88 100644 --- a/compiler/noirc_frontend/src/hir/resolution/structs.rs +++ b/compiler/noirc_frontend/src/hir/resolution/structs.rs @@ -24,6 +24,10 @@ pub(crate) fn resolve_structs( crate_id: CrateId, ) -> Vec<(CompilationError, FileId)> { let mut errors: Vec<(CompilationError, FileId)> = vec![]; + // This is necessary to avoid cloning the entire struct map + // when adding checks after each struct field is resolved. + let struct_ids = structs.keys().copied().collect::>(); + // Resolve each field in each struct. // Each struct should already be present in the NodeInterner after def collection. for (type_id, typ) in structs { @@ -35,6 +39,28 @@ pub(crate) fn resolve_structs( struct_def.generics = generics; }); } + + // Check whether the struct fields have nested slices + // We need to check after all structs are resolved to + // make sure every struct's fields is accurately set. 
+ for id in struct_ids { + let struct_type = context.def_interner.get_struct(id); + // Only handle structs without generics as any generics args will be checked + // after monomorphization when performing SSA codegen + if struct_type.borrow().generics.is_empty() { + let fields = struct_type.borrow().get_fields(&[]); + for field in fields.iter() { + if field.1.is_nested_slice() { + errors.push(( + ResolverError::NestedSlices { span: struct_type.borrow().location.span } + .into(), + struct_type.borrow().location.file, + )); + } + } + } + } + errors } @@ -49,5 +75,6 @@ fn resolve_struct_fields( let (generics, fields, errors) = Resolver::new(&mut context.def_interner, &path_resolver, &context.def_maps, file_id) .resolve_struct_fields(unresolved.struct_def); + (generics, fields, errors) } diff --git a/compiler/noirc_frontend/src/hir/resolution/traits.rs b/compiler/noirc_frontend/src/hir/resolution/traits.rs index 54d2630c722..8f966be312b 100644 --- a/compiler/noirc_frontend/src/hir/resolution/traits.rs +++ b/compiler/noirc_frontend/src/hir/resolution/traits.rs @@ -18,7 +18,7 @@ use crate::{ }, hir_def::traits::{TraitConstant, TraitFunction, TraitImpl, TraitType}, node_interner::{FuncId, NodeInterner, TraitId}, - Path, Shared, TraitItem, Type, TypeBinding, TypeVariableKind, + Generics, Path, Shared, TraitItem, Type, TypeVariable, TypeVariableKind, }; use super::{ @@ -38,8 +38,13 @@ pub(crate) fn resolve_traits( for (trait_id, unresolved_trait) in &traits { context.def_interner.push_empty_trait(*trait_id, unresolved_trait); } - let mut res: Vec<(CompilationError, FileId)> = vec![]; + let mut all_errors = Vec::new(); + for (trait_id, unresolved_trait) in traits { + let generics = vecmap(&unresolved_trait.trait_def.generics, |_| { + TypeVariable::unbound(context.def_interner.next_type_variable_id()) + }); + // Resolve order // 1. 
Trait Types ( Trait constants can have a trait type, therefore types before constants) let _ = resolve_trait_types(context, crate_id, &unresolved_trait); @@ -47,13 +52,23 @@ pub(crate) fn resolve_traits( let _ = resolve_trait_constants(context, crate_id, &unresolved_trait); // 3. Trait Methods let (methods, errors) = - resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait); - res.extend(errors); + resolve_trait_methods(context, trait_id, crate_id, &unresolved_trait, &generics); + + all_errors.extend(errors); + context.def_interner.update_trait(trait_id, |trait_def| { trait_def.set_methods(methods); + trait_def.generics = generics; }); + + // This check needs to be after the trait's methods are set since + // the interner may set `interner.ordering_type` based on the result type + // of the Cmp trait, if this is it. + if crate_id.is_stdlib() { + context.def_interner.try_add_operator_trait(trait_id); + } } - res + all_errors } fn resolve_trait_types( @@ -78,6 +93,7 @@ fn resolve_trait_methods( trait_id: TraitId, crate_id: CrateId, unresolved_trait: &UnresolvedTrait, + trait_generics: &Generics, ) -> (Vec, Vec<(CompilationError, FileId)>) { let interner = &mut context.def_interner; let def_maps = &mut context.def_maps; @@ -102,12 +118,15 @@ fn resolve_trait_methods( } = item { let the_trait = interner.get_trait(trait_id); - let self_type = - Type::TypeVariable(the_trait.self_type_typevar.clone(), TypeVariableKind::Normal); + let self_typevar = the_trait.self_type_typevar.clone(); + let self_type = Type::TypeVariable(self_typevar.clone(), TypeVariableKind::Normal); + let name_span = the_trait.name.span(); let mut resolver = Resolver::new(interner, &path_resolver, def_maps, file); resolver.add_generics(generics); - resolver.set_self_type(Some(self_type)); + resolver.add_existing_generics(&unresolved_trait.trait_def.generics, trait_generics); + resolver.add_existing_generic("Self", name_span, self_typevar); + 
resolver.set_self_type(Some(self_type.clone())); let func_id = unresolved_trait.method_ids[&name.0.contents]; let (_, func_meta) = resolver.resolve_trait_function( @@ -122,16 +141,7 @@ fn resolve_trait_methods( let arguments = vecmap(parameters, |param| resolver.resolve_type(param.1.clone())); let return_type = resolver.resolve_type(return_type.get_type().into_owned()); - let mut generics = vecmap(resolver.get_generics(), |(_, type_var, _)| match &*type_var - .borrow() - { - TypeBinding::Unbound(id) => (*id, type_var.clone()), - TypeBinding::Bound(binding) => unreachable!("Trait generic was bound to {binding}"), - }); - - // Ensure the trait is generic over the Self type as well - let the_trait = resolver.interner.get_trait(trait_id); - generics.push((the_trait.self_type_typevar_id, the_trait.self_type_typevar.clone())); + let generics = vecmap(resolver.get_generics(), |(_, type_var, _)| type_var.clone()); let default_impl_list: Vec<_> = unresolved_trait .fns_with_default_impl @@ -152,9 +162,8 @@ fn resolve_trait_methods( functions.push(TraitFunction { name: name.clone(), typ: Type::Forall(generics, Box::new(function_type)), - span: name.span(), + location: Location::new(name.span(), unresolved_trait.file_id), default_impl, - default_impl_file_id: unresolved_trait.file_id, default_impl_module_id: unresolved_trait.module_id, }); @@ -376,9 +385,12 @@ pub(crate) fn resolve_trait_impls( let mut resolver = Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); resolver.add_generics(&trait_impl.generics); - let self_type = resolver.resolve_type(unresolved_type.clone()); - let generics = resolver.get_generics().to_vec(); + let trait_generics = + vecmap(&trait_impl.trait_generics, |generic| resolver.resolve_type(generic.clone())); + + let self_type = resolver.resolve_type(unresolved_type.clone()); + let impl_generics = resolver.get_generics().to_vec(); let impl_id = interner.next_trait_impl_id(); let mut impl_methods = 
functions::resolve_function_set( @@ -388,7 +400,7 @@ pub(crate) fn resolve_trait_impls( trait_impl.methods.clone(), Some(self_type.clone()), Some(impl_id), - generics.clone(), + impl_generics.clone(), errors, ); @@ -408,7 +420,7 @@ pub(crate) fn resolve_trait_impls( let mut new_resolver = Resolver::new(interner, &path_resolver, &context.def_maps, trait_impl.file_id); - new_resolver.set_generics(generics); + new_resolver.set_generics(impl_generics.clone()); new_resolver.set_self_type(Some(self_type.clone())); if let Some(trait_id) = maybe_trait_id { @@ -416,7 +428,10 @@ pub(crate) fn resolve_trait_impls( &mut new_resolver, &impl_methods, trait_id, + trait_impl.trait_path.span(), + trait_impl.trait_generics, trait_impl.generics.len(), + trait_impl.file_id, errors, ); @@ -426,19 +441,27 @@ pub(crate) fn resolve_trait_impls( .flat_map(|item| new_resolver.resolve_trait_constraint(item)) .collect(); + let resolver_errors = new_resolver.take_errors().into_iter(); + errors.extend(resolver_errors.map(|error| (error.into(), trait_impl.file_id))); + let resolved_trait_impl = Shared::new(TraitImpl { ident: trait_impl.trait_path.last_segment().clone(), typ: self_type.clone(), trait_id, + trait_generics: trait_generics.clone(), file: trait_impl.file_id, where_clause, methods: vecmap(&impl_methods, |(_, func_id)| *func_id), }); + let impl_generics = vecmap(impl_generics, |(_, type_variable, _)| type_variable); + if let Err((prev_span, prev_file)) = interner.add_trait_implementation( self_type.clone(), trait_id, + trait_generics, impl_id, + impl_generics, resolved_trait_impl, ) { let error = DefCollectorErrorKind::OverlappingImpl { diff --git a/compiler/noirc_frontend/src/hir/type_check/expr.rs b/compiler/noirc_frontend/src/hir/type_check/expr.rs index f7154895150..58cf4e7b289 100644 --- a/compiler/noirc_frontend/src/hir/type_check/expr.rs +++ b/compiler/noirc_frontend/src/hir/type_check/expr.rs @@ -6,19 +6,19 @@ use crate::{ hir_def::{ expr::{ self, HirArrayLiteral, HirBinaryOp, 
HirExpression, HirLiteral, HirMethodCallExpression, - HirMethodReference, HirPrefixExpression, + HirMethodReference, HirPrefixExpression, ImplKind, }, types::Type, }, node_interner::{DefinitionKind, ExprId, FuncId, TraitId, TraitImplKind, TraitMethodId}, - BinaryOpKind, Signedness, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, + BinaryOpKind, TypeBinding, TypeBindings, TypeVariableKind, UnaryOp, }; use super::{errors::TypeCheckError, TypeChecker}; impl<'interner> TypeChecker<'interner> { fn check_if_deprecated(&mut self, expr: &ExprId) { - if let HirExpression::Ident(expr::HirIdent { location, id }) = + if let HirExpression::Ident(expr::HirIdent { location, id, impl_kind: _ }) = self.interner.expression(expr) { if let Some(DefinitionKind::Function(func_id)) = @@ -52,6 +52,10 @@ impl<'interner> TypeChecker<'interner> { // We must instantiate identifiers at every call site to replace this T with a new type // variable to handle generic functions. let t = self.interner.id_type_substitute_trait_as_type(ident.id); + + // This instantiate's a trait's generics as well which need to be set + // when the constraint below is later solved for when the function is + // finished. How to link the two? 
let (typ, bindings) = t.instantiate(self.interner); // Push any trait constraints required by this definition to the context @@ -59,13 +63,30 @@ impl<'interner> TypeChecker<'interner> { if let Some(definition) = self.interner.try_definition(ident.id) { if let DefinitionKind::Function(function) = definition.kind { let function = self.interner.function_meta(&function); + for mut constraint in function.trait_constraints.clone() { - constraint.typ = constraint.typ.substitute(&bindings); + constraint.apply_bindings(&bindings); self.trait_constraints.push((constraint, *expr_id)); } } } + if let ImplKind::TraitMethod(_, mut constraint, assumed) = ident.impl_kind { + constraint.apply_bindings(&bindings); + if assumed { + let trait_impl = TraitImplKind::Assumed { + object_type: constraint.typ, + trait_generics: constraint.trait_generics, + }; + self.interner.select_impl_for_expression(*expr_id, trait_impl); + } else { + // Currently only one impl can be selected per expr_id, so this + // constraint needs to be pushed after any other constraints so + // that monomorphization can resolve this trait method to the correct impl. 
+ self.trait_constraints.push((constraint, *expr_id)); + } + } + self.interner.store_instantiation_bindings(*expr_id, bindings); typ } @@ -136,11 +157,28 @@ impl<'interner> TypeChecker<'interner> { let rhs_span = self.interner.expr_span(&infix_expr.rhs); let span = lhs_span.merge(rhs_span); - self.infix_operand_type_rules(&lhs_type, &infix_expr.operator, &rhs_type, span) - .unwrap_or_else(|error| { + let operator = &infix_expr.operator; + match self.infix_operand_type_rules(&lhs_type, operator, &rhs_type, span) { + Ok((typ, use_impl)) => { + if use_impl { + let id = infix_expr.trait_method_id; + // Assume operators have no trait generics + self.verify_trait_constraint( + &lhs_type, + id.trait_id, + &[], + *expr_id, + span, + ); + self.typecheck_operator_method(*expr_id, id, &lhs_type, span); + } + typ + } + Err(error) => { self.errors.push(error); Type::Error - }) + } + } } HirExpression::Index(index_expr) => self.check_index_expression(expr_id, index_expr), HirExpression::Call(call_expr) => { @@ -180,10 +218,11 @@ impl<'interner> TypeChecker<'interner> { // Automatically add `&mut` if the method expects a mutable reference and // the object is not already one. 
if *func_id != FuncId::dummy_id() { - let func_meta = self.interner.function_meta(func_id); + let function_type = + self.interner.function_meta(func_id).typ.clone(); self.try_add_mutable_reference_to_object( &mut method_call, - &func_meta.typ, + &function_type, &mut args, ); } @@ -196,11 +235,12 @@ impl<'interner> TypeChecker<'interner> { .trait_id }) } - HirMethodReference::TraitMethodId(method) => Some(method.trait_id), + HirMethodReference::TraitMethodId(method, _) => Some(method.trait_id), }; let (function_id, function_call) = method_call.into_function_call( - method_ref.clone(), + &method_ref, + object_type.clone(), location, self.interner, ); @@ -209,7 +249,15 @@ impl<'interner> TypeChecker<'interner> { let ret = self.check_method_call(&function_id, method_ref, args, span); if let Some(trait_id) = trait_id { - self.verify_trait_constraint(&object_type, trait_id, function_id, span); + // Assume no trait generics were specified + // TODO: Fill in type variables + self.verify_trait_constraint( + &object_type, + trait_id, + &[], + function_id, + span, + ); } self.interner.replace_expr(expr_id, function_call); @@ -287,28 +335,6 @@ impl<'interner> TypeChecker<'interner> { Type::Function(params, Box::new(lambda.return_type), Box::new(env_type)) } - HirExpression::TraitMethodReference(method) => { - let the_trait = self.interner.get_trait(method.trait_id); - let typ2 = &the_trait.methods[method.method_index].typ; - let (typ, mut bindings) = typ2.instantiate(self.interner); - - // We must also remember to apply these substitutions to the object_type - // referenced by the selected trait impl, if one has yet to be selected. 
- let impl_kind = self.interner.get_selected_impl_for_ident(*expr_id); - if let Some(TraitImplKind::Assumed { object_type }) = impl_kind { - let the_trait = self.interner.get_trait(method.trait_id); - let object_type = object_type.substitute(&bindings); - bindings.insert( - the_trait.self_type_typevar_id, - (the_trait.self_type_typevar.clone(), object_type.clone()), - ); - self.interner - .select_impl_for_ident(*expr_id, TraitImplKind::Assumed { object_type }); - } - - self.interner.store_instantiation_bindings(*expr_id, bindings); - typ - } }; self.interner.push_expr_type(expr_id, typ.clone()); @@ -319,11 +345,14 @@ impl<'interner> TypeChecker<'interner> { &mut self, object_type: &Type, trait_id: TraitId, + trait_generics: &[Type], function_ident_id: ExprId, span: Span, ) { - match self.interner.lookup_trait_implementation(object_type, trait_id) { - Ok(impl_kind) => self.interner.select_impl_for_ident(function_ident_id, impl_kind), + match self.interner.lookup_trait_implementation(object_type, trait_id, trait_generics) { + Ok(impl_kind) => { + self.interner.select_impl_for_expression(function_ident_id, impl_kind); + } Err(erroring_constraints) => { // Don't show any errors where try_get_trait returns None. // This can happen if a trait is used that was never declared. 
@@ -331,7 +360,12 @@ impl<'interner> TypeChecker<'interner> { .into_iter() .map(|constraint| { let r#trait = self.interner.try_get_trait(constraint.trait_id)?; - Some((constraint.typ, r#trait.name.to_string())) + let mut name = r#trait.name.to_string(); + if !constraint.trait_generics.is_empty() { + let generics = vecmap(&constraint.trait_generics, ToString::to_string); + name += &format!("<{}>", generics.join(", ")); + } + Some((constraint.typ, name)) }) .collect::>>(); @@ -541,7 +575,7 @@ impl<'interner> TypeChecker<'interner> { arguments: Vec<(Type, ExprId, Span)>, span: Span, ) -> Type { - let (fn_typ, param_len) = match method_ref { + let (fn_typ, param_len, generic_bindings) = match method_ref { HirMethodReference::FuncId(func_id) => { if func_id == FuncId::dummy_id() { return Type::Error; @@ -549,12 +583,22 @@ impl<'interner> TypeChecker<'interner> { let func_meta = self.interner.function_meta(&func_id); let param_len = func_meta.parameters.len(); - (func_meta.typ, param_len) + (func_meta.typ.clone(), param_len, TypeBindings::new()) } - HirMethodReference::TraitMethodId(method) => { + HirMethodReference::TraitMethodId(method, generics) => { let the_trait = self.interner.get_trait(method.trait_id); let method = &the_trait.methods[method.method_index]; - (method.typ.clone(), method.arguments().len()) + + // These are any bindings from the trait's generics itself, + // rather than an impl or method's generics. 
+ let generic_bindings = the_trait + .generics + .iter() + .zip(generics) + .map(|(var, arg)| (var.id(), (var.clone(), arg))) + .collect(); + + (method.typ.clone(), method.arguments().len(), generic_bindings) } }; @@ -568,11 +612,12 @@ impl<'interner> TypeChecker<'interner> { }); } - let (function_type, instantiation_bindings) = fn_typ.instantiate(self.interner); + let (function_type, instantiation_bindings) = + fn_typ.instantiate_with_bindings(generic_bindings, self.interner); self.interner.store_instantiation_bindings(*function_ident_id, instantiation_bindings); self.interner.push_expr_type(function_ident_id, function_type.clone()); - self.bind_function_type(function_type, arguments, span) + self.bind_function_type(function_type.clone(), arguments, span) } fn check_if_expr(&mut self, if_expr: &expr::HirIfExpression, expr_id: &ExprId) -> Type { @@ -753,19 +798,22 @@ impl<'interner> TypeChecker<'interner> { None } + // Given a binary comparison operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. fn comparator_operand_type_rules( &mut self, lhs_type: &Type, rhs_type: &Type, op: &HirBinaryOp, span: Span, - ) -> Result { - use crate::BinaryOpKind::{Equal, NotEqual}; + ) -> Result<(Type, bool), TypeCheckError> { use Type::*; match (lhs_type, rhs_type) { // Avoid reporting errors multiple times - (Error, _) | (_, Error) => Ok(Bool), + (Error, _) | (_, Error) => Ok((Bool, false)), // Matches on TypeVariable must be first to follow any type // bindings. 
@@ -791,7 +839,7 @@ impl<'interner> TypeChecker<'interner> { || other == &Type::Error { Type::apply_type_bindings(bindings); - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -816,36 +864,23 @@ impl<'interner> TypeChecker<'interner> { span, }); } - Ok(Bool) - } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) + Ok((Bool, false)) } (FieldElement, FieldElement) => { if op.kind.is_valid_for_field_type() { - Ok(Bool) + Ok((Bool, false)) } else { Err(TypeCheckError::FieldComparison { span }) } } // <= and friends are technically valid for booleans, just not very useful - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), // Special-case == and != for arrays (Array(x_size, x_type), Array(y_size, y_type)) - if matches!(op.kind, Equal | NotEqual) => + if matches!(op.kind, BinaryOpKind::Equal | BinaryOpKind::NotEqual) => { - self.unify(x_type, y_type, || TypeCheckError::TypeMismatchWithSource { - expected: lhs_type.clone(), - actual: rhs_type.clone(), - source: Source::ArrayElements, - span: op.location.span, - }); - self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), actual: rhs_type.clone(), @@ -853,19 +888,9 @@ impl<'interner> TypeChecker<'interner> { span: op.location.span, }); - Ok(Bool) - } - (lhs @ NamedGeneric(binding_a, _), rhs @ NamedGeneric(binding_b, _)) => { - if binding_a == binding_b { - return Ok(Bool); - } - Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }) + self.comparator_operand_type_rules(x_type, y_type, op, span) } + (String(x_size), String(y_size)) => { self.unify(x_size, y_size, || TypeCheckError::TypeMismatchWithSource { expected: 
*x_size.clone(), @@ -874,14 +899,17 @@ impl<'interner> TypeChecker<'interner> { source: Source::StringLen, }); - Ok(Bool) + Ok((Bool, false)) + } + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((Bool, true)) } - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::Comparison, - span, - }), } } @@ -921,7 +949,7 @@ impl<'interner> TypeChecker<'interner> { &self.current_function.expect("unexpected method outside a function"), ); - for constraint in func_meta.trait_constraints { + for constraint in &func_meta.trait_constraints { if *object_type == constraint.typ { if let Some(the_trait) = self.interner.try_get_trait(constraint.trait_id) { for (method_index, method) in the_trait.methods.iter().enumerate() { @@ -930,7 +958,10 @@ impl<'interner> TypeChecker<'interner> { trait_id: constraint.trait_id, method_index, }; - return Some(HirMethodReference::TraitMethodId(trait_method)); + return Some(HirMethodReference::TraitMethodId( + trait_method, + constraint.trait_generics.clone(), + )); } } } @@ -1041,13 +1072,16 @@ impl<'interner> TypeChecker<'interner> { } // Given a binary operator and another type. This method will produce the output type + // and a boolean indicating whether to use the trait impl corresponding to the operator + // or not. A value of false indicates the caller to use a primitive operation for this + // operator, while a true value indicates a user-provided trait impl is required. 
fn infix_operand_type_rules( &mut self, lhs_type: &Type, op: &HirBinaryOp, rhs_type: &Type, span: Span, - ) -> Result { + ) -> Result<(Type, bool), TypeCheckError> { if op.kind.is_comparator() { return self.comparator_operand_type_rules(lhs_type, rhs_type, op, span); } @@ -1055,7 +1089,7 @@ impl<'interner> TypeChecker<'interner> { use Type::*; match (lhs_type, rhs_type) { // An error type on either side will always return an error - (Error, _) | (_, Error) => Ok(Error), + (Error, _) | (_, Error) => Ok((Error, false)), // Matches on TypeVariable must be first so that we follow any type // bindings. @@ -1096,7 +1130,7 @@ impl<'interner> TypeChecker<'interner> { || other == &Type::Error { Type::apply_type_bindings(bindings); - Ok(other.clone()) + Ok((other.clone(), false)) } else { Err(TypeCheckError::TypeMismatchWithSource { expected: lhs_type.clone(), @@ -1121,31 +1155,8 @@ impl<'interner> TypeChecker<'interner> { span, }); } - if op.is_bit_shift() - && (*sign_x == Signedness::Signed || *sign_y == Signedness::Signed) - { - Err(TypeCheckError::InvalidInfixOp { kind: "Signed integer", span }) - } else { - Ok(Integer(*sign_x, *bit_width_x)) - } + Ok((Integer(*sign_x, *bit_width_x), false)) } - (Integer(..), FieldElement) | (FieldElement, Integer(..)) => { - Err(TypeCheckError::IntegerAndFieldBinaryOperation { span }) - } - (Integer(..), typ) | (typ, Integer(..)) => { - Err(TypeCheckError::IntegerTypeMismatch { typ: typ.clone(), span }) - } - // These types are not supported in binary operations - (Array(..), _) | (_, Array(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Arrays", span }) - } - (Struct(..), _) | (_, Struct(..)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Structs", span }) - } - (Tuple(_), _) | (_, Tuple(_)) => { - Err(TypeCheckError::InvalidInfixOp { kind: "Tuples", span }) - } - // The result of two Fields is always a witness (FieldElement, FieldElement) => { if op.is_bitwise() { @@ -1154,17 +1165,20 @@ impl<'interner> 
TypeChecker<'interner> { if op.is_modulo() { return Err(TypeCheckError::FieldModulo { span }); } - Ok(FieldElement) + Ok((FieldElement, false)) } - (Bool, Bool) => Ok(Bool), + (Bool, Bool) => Ok((Bool, false)), - (lhs, rhs) => Err(TypeCheckError::TypeMismatchWithSource { - expected: lhs.clone(), - actual: rhs.clone(), - source: Source::BinOp(op.kind), - span, - }), + (lhs, rhs) => { + self.unify(lhs, rhs, || TypeCheckError::TypeMismatchWithSource { + expected: lhs.clone(), + actual: rhs.clone(), + span: op.location.span, + source: Source::Binary, + }); + Ok((lhs.clone(), true)) + } } } @@ -1216,6 +1230,59 @@ impl<'interner> TypeChecker<'interner> { } } } + + /// Prerequisite: verify_trait_constraint of the operator's trait constraint. + /// + /// Although by this point the operator is expected to already have a trait impl, + /// we still need to match the operator's type against the method's instantiated type + /// to ensure the instantiation bindings are correct and the monomorphizer can + /// re-apply the needed bindings. + fn typecheck_operator_method( + &mut self, + expr_id: ExprId, + trait_method_id: TraitMethodId, + object_type: &Type, + span: Span, + ) { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + + let method = &the_trait.methods[trait_method_id.method_index]; + let (method_type, mut bindings) = method.typ.instantiate(self.interner); + + match method_type { + Type::Function(args, _, _) => { + // We can cheat a bit and match against only the object type here since no operator + // overload uses other generic parameters or return types aside from the object type. 
+ let expected_object_type = &args[0]; + self.unify(object_type, expected_object_type, || TypeCheckError::TypeMismatch { + expected_typ: expected_object_type.to_string(), + expr_typ: object_type.to_string(), + expr_span: span, + }); + } + other => { + unreachable!("Expected operator method to have a function type, but found {other}") + } + } + + // We must also remember to apply these substitutions to the object_type + // referenced by the selected trait impl, if one has yet to be selected. + let impl_kind = self.interner.get_selected_impl_for_expression(expr_id); + if let Some(TraitImplKind::Assumed { object_type, trait_generics }) = impl_kind { + let the_trait = self.interner.get_trait(trait_method_id.trait_id); + let object_type = object_type.substitute(&bindings); + bindings.insert( + the_trait.self_type_typevar_id, + (the_trait.self_type_typevar.clone(), object_type.clone()), + ); + self.interner.select_impl_for_expression( + expr_id, + TraitImplKind::Assumed { object_type, trait_generics }, + ); + } + + self.interner.store_instantiation_bindings(expr_id, bindings); + } } /// Taken from: https://stackoverflow.com/a/47127500 diff --git a/compiler/noirc_frontend/src/hir/type_check/mod.rs b/compiler/noirc_frontend/src/hir/type_check/mod.rs index 95991047091..3c2a970ee84 100644 --- a/compiler/noirc_frontend/src/hir/type_check/mod.rs +++ b/compiler/noirc_frontend/src/hir/type_check/mod.rs @@ -50,18 +50,24 @@ pub fn type_check_func(interner: &mut NodeInterner, func_id: FuncId) -> Vec Vec Vec Vec Vec TypeChecker<'interner> { // Must push new lvalue to the interner, we've resolved any field indices self.interner.update_statement(stmt_id, |stmt| match stmt { HirStatement::Assign(assign) => assign.lvalue = new_lvalue, - _ => unreachable!(), + _ => unreachable!("statement is known to be assignment"), }); let span = self.interner.expr_span(&assign_stmt.expression); @@ -195,7 +195,7 @@ impl<'interner> TypeChecker<'interner> { typ.follow_bindings() }; - (typ.clone(), 
HirLValue::Ident(*ident, typ), mutable) + (typ.clone(), HirLValue::Ident(ident.clone(), typ), mutable) } HirLValue::MemberAccess { object, field_name, .. } => { let (lhs_type, object, mut mutable) = self.check_lvalue(object, assign_span); @@ -216,8 +216,8 @@ impl<'interner> TypeChecker<'interner> { // we eventually reassign to it. let id = DefinitionId::dummy_id(); let location = Location::new(span, fm::FileId::dummy()); - let tmp_value = - HirLValue::Ident(HirIdent { location, id }, Type::Error); + let ident = HirIdent::non_trait_method(id, location); + let tmp_value = HirLValue::Ident(ident, Type::Error); let lvalue = std::mem::replace(object_ref, Box::new(tmp_value)); *object_ref = Box::new(HirLValue::Dereference { lvalue, element_type }); diff --git a/compiler/noirc_frontend/src/hir_def/expr.rs b/compiler/noirc_frontend/src/hir_def/expr.rs index ef1c3af7ac0..fe1cd78b5ed 100644 --- a/compiler/noirc_frontend/src/hir_def/expr.rs +++ b/compiler/noirc_frontend/src/hir_def/expr.rs @@ -6,6 +6,7 @@ use crate::node_interner::{DefinitionId, ExprId, FuncId, NodeInterner, StmtId, T use crate::{BinaryOp, BinaryOpKind, Ident, Shared, UnaryOp}; use super::stmt::HirPattern; +use super::traits::TraitConstraint; use super::types::{StructType, Type}; /// A HirExpression is the result of an Expression in the AST undergoing @@ -29,7 +30,6 @@ pub enum HirExpression { If(HirIfExpression), Tuple(Vec), Lambda(HirLambda), - TraitMethodReference(TraitMethodId), Error, } @@ -41,10 +41,45 @@ impl HirExpression { } /// Corresponds to a variable in the source code -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone)] pub struct HirIdent { pub location: Location, pub id: DefinitionId, + + /// If this HirIdent refers to a trait method, this field stores + /// whether the impl for this method is known or not. 
+ pub impl_kind: ImplKind, +} + +impl HirIdent { + pub fn non_trait_method(id: DefinitionId, location: Location) -> Self { + Self { id, location, impl_kind: ImplKind::NotATraitMethod } + } +} + +#[derive(Debug, Clone)] +pub enum ImplKind { + /// This ident is not a trait method + NotATraitMethod, + + /// This ident refers to a trait method and its impl needs to be verified, + /// and eventually linked to this id. The boolean indicates whether the impl + /// is already assumed to exist - e.g. when resolving a path such as `T::default` + /// when there is a corresponding `T: Default` constraint in scope. + TraitMethod(TraitMethodId, TraitConstraint, bool), +} + +impl Eq for HirIdent {} +impl PartialEq for HirIdent { + fn eq(&self, other: &Self) -> bool { + self.id == other.id + } +} + +impl std::hash::Hash for HirIdent { + fn hash(&self, state: &mut H) { + self.id.hash(state); + } } #[derive(Debug, Copy, Clone, PartialEq, Eq)] @@ -101,6 +136,12 @@ pub struct HirInfixExpression { pub lhs: ExprId, pub operator: HirBinaryOp, pub rhs: ExprId, + + /// The trait method id for the operator trait method that corresponds to this operator. + /// For derived operators like `!=`, this will lead to the method `Eq::eq`. For these + /// cases, it is up to the monomorphization pass to insert the appropriate `not` operation + /// after the call to `Eq::eq` to get the result of the `!=` operator. + pub trait_method_id: TraitMethodId, } /// This is always a struct field access `my_struct.field` @@ -156,28 +197,35 @@ pub enum HirMethodReference { /// Or a method can come from a Trait impl block, in which case /// the actual function called will depend on the instantiated type, /// which can be only known during monomorphization. 
- TraitMethodId(TraitMethodId), + TraitMethodId(TraitMethodId, /*trait generics:*/ Vec), } impl HirMethodCallExpression { pub fn into_function_call( mut self, - method: HirMethodReference, + method: &HirMethodReference, + object_type: Type, location: Location, interner: &mut NodeInterner, ) -> (ExprId, HirExpression) { let mut arguments = vec![self.object]; arguments.append(&mut self.arguments); - let expr = match method { + let (id, impl_kind) = match method { HirMethodReference::FuncId(func_id) => { - let id = interner.function_definition_id(func_id); - HirExpression::Ident(HirIdent { location, id }) + (interner.function_definition_id(*func_id), ImplKind::NotATraitMethod) } - HirMethodReference::TraitMethodId(method_id) => { - HirExpression::TraitMethodReference(method_id) + HirMethodReference::TraitMethodId(method_id, generics) => { + let id = interner.trait_method_id(*method_id); + let constraint = TraitConstraint { + typ: object_type, + trait_id: method_id.trait_id, + trait_generics: generics.clone(), + }; + (id, ImplKind::TraitMethod(*method_id, constraint, false)) } }; + let expr = HirExpression::Ident(HirIdent { location, id, impl_kind }); let func = interner.push_expr(expr); (func, HirExpression::Call(HirCallExpression { func, arguments, location })) } diff --git a/compiler/noirc_frontend/src/hir_def/function.rs b/compiler/noirc_frontend/src/hir_def/function.rs index 085bda107e3..9fff301f5f7 100644 --- a/compiler/noirc_frontend/src/hir_def/function.rs +++ b/compiler/noirc_frontend/src/hir_def/function.rs @@ -131,22 +131,12 @@ impl FuncMeta { } } - pub fn into_function_signature(self) -> FunctionSignature { - // Doesn't use `self.return_type()` so we aren't working with references and don't need a `clone()` - let return_type = match self.typ { - Type::Function(_, ret, _env) => *ret, - Type::Forall(_, typ) => match *typ { - Type::Function(_, ret, _env) => *ret, - _ => unreachable!(), - }, - _ => unreachable!(), - }; - let return_type = match return_type { + 
pub fn function_signature(&self) -> FunctionSignature { + let return_type = match self.return_type() { Type::Unit => None, - typ => Some(typ), + typ => Some(typ.clone()), }; - - (self.parameters.0, return_type) + (self.parameters.0.clone(), return_type) } /// Gives the (uninstantiated) return type of this function. diff --git a/compiler/noirc_frontend/src/hir_def/stmt.rs b/compiler/noirc_frontend/src/hir_def/stmt.rs index 21f9b431b3a..34c9302c251 100644 --- a/compiler/noirc_frontend/src/hir_def/stmt.rs +++ b/compiler/noirc_frontend/src/hir_def/stmt.rs @@ -28,8 +28,8 @@ pub struct HirLetStatement { impl HirLetStatement { pub fn ident(&self) -> HirIdent { - match self.pattern { - HirPattern::Identifier(ident) => ident, + match &self.pattern { + HirPattern::Identifier(ident) => ident.clone(), _ => panic!("can only fetch hir ident from HirPattern::Identifier"), } } diff --git a/compiler/noirc_frontend/src/hir_def/traits.rs b/compiler/noirc_frontend/src/hir_def/traits.rs index ea9c2e2928c..16b9899039f 100644 --- a/compiler/noirc_frontend/src/hir_def/traits.rs +++ b/compiler/noirc_frontend/src/hir_def/traits.rs @@ -3,18 +3,17 @@ use std::collections::HashMap; use crate::{ graph::CrateId, node_interner::{FuncId, TraitId, TraitMethodId}, - Generics, Ident, NoirFunction, Type, TypeVariable, TypeVariableId, + Generics, Ident, NoirFunction, Type, TypeBindings, TypeVariable, TypeVariableId, }; use fm::FileId; -use noirc_errors::Span; +use noirc_errors::{Location, Span}; #[derive(Clone, Debug, PartialEq, Eq)] pub struct TraitFunction { pub name: Ident, pub typ: Type, - pub span: Span, + pub location: Location, pub default_impl: Option>, - pub default_impl_file_id: fm::FileId, pub default_impl_module_id: crate::hir::def_map::LocalModuleId, } @@ -56,7 +55,7 @@ pub struct Trait { pub name: Ident, pub generics: Generics, - pub span: Span, + pub location: Location, /// When resolving the types of Trait elements, all references to `Self` resolve /// to this TypeVariable. 
Then when we check if the types of trait impl elements @@ -65,11 +64,13 @@ pub struct Trait { pub self_type_typevar_id: TypeVariableId, pub self_type_typevar: TypeVariable, } + #[derive(Debug)] pub struct TraitImpl { pub ident: Ident, pub typ: Type, pub trait_id: TraitId, + pub trait_generics: Vec, pub file: FileId, pub methods: Vec, // methods[i] is the implementation of trait.methods[i] for Type typ @@ -84,12 +85,20 @@ pub struct TraitImpl { pub struct TraitConstraint { pub typ: Type, pub trait_id: TraitId, - // pub trait_generics: Generics, TODO + pub trait_generics: Vec, } impl TraitConstraint { - pub fn new(typ: Type, trait_id: TraitId) -> Self { - Self { typ, trait_id } + pub fn new(typ: Type, trait_id: TraitId, trait_generics: Vec) -> Self { + Self { typ, trait_id, trait_generics } + } + + pub fn apply_bindings(&mut self, type_bindings: &TypeBindings) { + self.typ = self.typ.substitute(type_bindings); + + for typ in &mut self.trait_generics { + *typ = typ.substitute(type_bindings); + } } } @@ -138,7 +147,7 @@ impl TraitFunction { } } - pub fn generics(&self) -> &[(TypeVariableId, TypeVariable)] { + pub fn generics(&self) -> &[TypeVariable] { match &self.typ { Type::Function(..) => &[], Type::Forall(generics, _) => generics, diff --git a/compiler/noirc_frontend/src/hir_def/types.rs b/compiler/noirc_frontend/src/hir_def/types.rs index f24ea586757..2c08a980d23 100644 --- a/compiler/noirc_frontend/src/hir_def/types.rs +++ b/compiler/noirc_frontend/src/hir_def/types.rs @@ -1,4 +1,5 @@ use std::{ + borrow::Cow, cell::RefCell, collections::{BTreeSet, HashMap}, rc::Rc, @@ -63,9 +64,9 @@ pub enum Type { TypeVariable(TypeVariable, TypeVariableKind), /// `impl Trait` when used in a type position. - /// These are only matched based on the TraitId. The trait name paramer is only + /// These are only matched based on the TraitId. The trait name parameter is only /// used for displaying error messages using the name of the trait. 
- TraitAsType(TraitId, /*name:*/ Rc), + TraitAsType(TraitId, /*name:*/ Rc, /*generics:*/ Vec), /// NamedGenerics are the 'T' or 'U' in a user-defined generic function /// like `fn foo(...) {}`. Unlike TypeVariables, they cannot be bound over. @@ -142,6 +143,43 @@ impl Type { | Type::Error => unreachable!("This type cannot exist as a parameter to main"), } } + + pub(crate) fn is_nested_slice(&self) -> bool { + match self { + Type::Array(size, elem) => { + if let Type::NotConstant = size.as_ref() { + elem.as_ref().contains_slice() + } else { + false + } + } + _ => false, + } + } + + fn contains_slice(&self) -> bool { + match self { + Type::Array(size, _) => matches!(size.as_ref(), Type::NotConstant), + Type::Struct(struct_typ, generics) => { + let fields = struct_typ.borrow().get_fields(generics); + for field in fields.iter() { + if field.1.contains_slice() { + return true; + } + } + false + } + Type::Tuple(types) => { + for typ in types.iter() { + if typ.contains_slice() { + return true; + } + } + false + } + _ => false, + } + } } /// A list of TypeVariableIds to bind to a type. Storing the @@ -169,11 +207,8 @@ pub struct StructType { pub location: Location, } -/// Corresponds to generic lists such as `` in the source -/// program. The `TypeVariableId` portion is used to match two -/// type variables to check for equality, while the `TypeVariable` is -/// the actual part that can be mutated to bind it to another type. -pub type Generics = Vec<(TypeVariableId, TypeVariable)>; +/// Corresponds to generic lists such as `` in the source program. 
+pub type Generics = Vec; impl std::hash::Hash for StructType { fn hash(&self, state: &mut H) { @@ -222,7 +257,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) .collect(); (typ.substitute(&substitutions), i) @@ -238,7 +273,7 @@ impl StructType { .generics .iter() .zip(generic_args) - .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) .collect(); vecmap(&self.fields, |(name, typ)| { @@ -279,7 +314,7 @@ pub struct TypeAliasType { pub id: TypeAliasId, pub typ: Type, pub generics: Generics, - pub span: Span, + pub location: Location, } impl std::hash::Hash for TypeAliasType { @@ -299,7 +334,7 @@ impl std::fmt::Display for TypeAliasType { write!(f, "{}", self.name)?; if !self.generics.is_empty() { - let generics = vecmap(&self.generics, |(_, binding)| binding.0.borrow().to_string()); + let generics = vecmap(&self.generics, |binding| binding.borrow().to_string()); write!(f, "{}", generics.join(", "))?; } @@ -311,11 +346,11 @@ impl TypeAliasType { pub fn new( id: TypeAliasId, name: Ident, - span: Span, + location: Location, typ: Type, generics: Generics, ) -> TypeAliasType { - TypeAliasType { id, typ, name, span, generics } + TypeAliasType { id, typ, name, location, generics } } pub fn set_type_and_generics(&mut self, new_typ: Type, new_generics: Generics) { @@ -331,7 +366,7 @@ impl TypeAliasType { .generics .iter() .zip(generic_args) - .map(|((old_id, old_var), new)| (*old_id, (old_var.clone(), new.clone()))) + .map(|(old, new)| (old.id(), (old.clone(), new.clone()))) .collect(); self.typ.substitute(&substitutions) @@ -415,11 +450,15 @@ pub enum TypeVariableKind { /// A TypeVariable is a mutable reference that is either /// bound to some type, or unbound with a given TypeVariableId. 
#[derive(Debug, PartialEq, Eq, Clone, Hash)] -pub struct TypeVariable(Shared); +pub struct TypeVariable(TypeVariableId, Shared); impl TypeVariable { pub fn unbound(id: TypeVariableId) -> Self { - TypeVariable(Shared::new(TypeBinding::Unbound(id))) + TypeVariable(id, Shared::new(TypeBinding::Unbound(id))) + } + + pub fn id(&self) -> TypeVariableId { + self.0 } /// Bind this type variable to a value. @@ -428,7 +467,7 @@ impl TypeVariable { /// Also Panics if the ID of this TypeVariable occurs within the given /// binding, as that would cause an infinitely recursive type. pub fn bind(&self, typ: Type) { - let id = match &*self.0.borrow() { + let id = match &*self.1.borrow() { TypeBinding::Bound(binding) => { unreachable!("TypeVariable::bind, cannot bind bound var {} to {}", binding, typ) } @@ -436,11 +475,11 @@ impl TypeVariable { }; assert!(!typ.occurs(id)); - *self.0.borrow_mut() = TypeBinding::Bound(typ); + *self.1.borrow_mut() = TypeBinding::Bound(typ); } pub fn try_bind(&self, binding: Type, span: Span) -> Result<(), TypeCheckError> { - let id = match &*self.0.borrow() { + let id = match &*self.1.borrow() { TypeBinding::Bound(binding) => { unreachable!("Expected unbound, found bound to {binding}") } @@ -450,28 +489,28 @@ impl TypeVariable { if binding.occurs(id) { Err(TypeCheckError::TypeAnnotationsNeeded { span }) } else { - *self.0.borrow_mut() = TypeBinding::Bound(binding); + *self.1.borrow_mut() = TypeBinding::Bound(binding); Ok(()) } } /// Borrows this TypeVariable to (e.g.) manually match on the inner TypeBinding. pub fn borrow(&self) -> std::cell::Ref { - self.0.borrow() + self.1.borrow() } /// Unbind this type variable, setting it to Unbound(id). /// /// This is generally a logic error to use outside of monomorphization. 
pub fn unbind(&self, id: TypeVariableId) { - *self.0.borrow_mut() = TypeBinding::Unbound(id); + *self.1.borrow_mut() = TypeBinding::Unbound(id); } /// Forcibly bind a type variable to a new type - even if the type /// variable is already bound to a different type. This generally /// a logic error to use outside of monomorphization. pub fn force_bind(&self, typ: Type) { - *self.0.borrow_mut() = TypeBinding::Bound(typ); + *self.1.borrow_mut() = TypeBinding::Bound(typ); } } @@ -503,7 +542,7 @@ impl Type { } pub fn type_variable(id: TypeVariableId) -> Type { - let var = TypeVariable(Shared::new(TypeBinding::Unbound(id))); + let var = TypeVariable::unbound(id); Type::TypeVariable(var, TypeVariableKind::Normal) } @@ -512,14 +551,14 @@ impl Type { pub fn constant_variable(length: u64, interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::Constant(length); - let var = TypeVariable(Shared::new(TypeBinding::Unbound(id))); + let var = TypeVariable::unbound(id); Type::TypeVariable(var, kind) } pub fn polymorphic_integer(interner: &mut NodeInterner) -> Type { let id = interner.next_type_variable_id(); let kind = TypeVariableKind::IntegerOrField; - let var = TypeVariable(Shared::new(TypeBinding::Unbound(id))); + let var = TypeVariable::unbound(id); Type::TypeVariable(var, kind) } @@ -529,7 +568,7 @@ impl Type { /// they shouldn't be bound over until monomorphization. 
pub fn is_bindable(&self) -> bool { match self { - Type::TypeVariable(binding, _) => match &*binding.0.borrow() { + Type::TypeVariable(binding, _) => match &*binding.borrow() { TypeBinding::Bound(binding) => binding.is_bindable(), TypeBinding::Unbound(_) => true, }, @@ -553,7 +592,7 @@ impl Type { // True if the given type is a NamedGeneric with the target_id let named_generic_id_matches_target = |typ: &Type| { if let Type::NamedGeneric(type_variable, _) = typ { - match &*type_variable.0.borrow() { + match &*type_variable.borrow() { TypeBinding::Bound(_) => { unreachable!("Named generics should not be bound until monomorphization") } @@ -653,7 +692,7 @@ impl Type { match self { Type::Forall(generics, _) => generics.len(), Type::TypeVariable(type_variable, _) | Type::NamedGeneric(type_variable, _) => { - match &*type_variable.0.borrow() { + match &*type_variable.borrow() { TypeBinding::Bound(binding) => binding.generic_count(), TypeBinding::Unbound(_) => 0, } @@ -665,9 +704,28 @@ impl Type { /// Takes a monomorphic type and generalizes it over each of the type variables in the /// given type bindings, ignoring what each type variable is bound to in the TypeBindings. pub(crate) fn generalize_from_substitutions(self, type_bindings: TypeBindings) -> Type { - let polymorphic_type_vars = vecmap(type_bindings, |(id, (type_var, _))| (id, type_var)); + let polymorphic_type_vars = vecmap(type_bindings, |(_, (type_var, _))| type_var); Type::Forall(polymorphic_type_vars, Box::new(self)) } + + /// Return this type as a monomorphic type - without a `Type::Forall` if there is one. + /// This is only a shallow check since Noir's type system prohibits `Type::Forall` anywhere + /// inside other types. + pub fn as_monotype(&self) -> &Type { + match self { + Type::Forall(_, typ) => typ.as_ref(), + other => other, + } + } + + /// Return the generics and type within this `Type::Forall`. 
+ /// Panics if `self` is not `Type::Forall` + pub fn unwrap_forall(&self) -> (Cow, &Type) { + match self { + Type::Forall(generics, typ) => (Cow::Borrowed(generics), typ.as_ref()), + other => (Cow::Owned(Generics::new()), other), + } + } } impl std::fmt::Display for Type { @@ -687,23 +745,23 @@ impl std::fmt::Display for Type { Signedness::Signed => write!(f, "i{num_bits}"), Signedness::Unsigned => write!(f, "u{num_bits}"), }, - Type::TypeVariable(var, TypeVariableKind::Normal) => write!(f, "{}", var.0.borrow()), + Type::TypeVariable(var, TypeVariableKind::Normal) => write!(f, "{}", var.borrow()), Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { - if let TypeBinding::Unbound(_) = &*binding.0.borrow() { + if let TypeBinding::Unbound(_) = &*binding.borrow() { // Show a Field by default if this TypeVariableKind::IntegerOrField is unbound, since that is // what they bind to by default anyway. It is less confusing than displaying it // as a generic. write!(f, "Field") } else { - write!(f, "{}", binding.0.borrow()) + write!(f, "{}", binding.borrow()) } } Type::TypeVariable(binding, TypeVariableKind::Constant(n)) => { - if let TypeBinding::Unbound(_) = &*binding.0.borrow() { + if let TypeBinding::Unbound(_) = &*binding.borrow() { // TypeVariableKind::Constant(n) binds to Type::Constant(n) by default, so just show that. 
write!(f, "{n}") } else { - write!(f, "{}", binding.0.borrow()) + write!(f, "{}", binding.borrow()) } } Type::Struct(s, args) => { @@ -714,8 +772,13 @@ impl std::fmt::Display for Type { write!(f, "{}<{}>", s.borrow(), args.join(", ")) } } - Type::TraitAsType(_id, name) => { - write!(f, "impl {}", name) + Type::TraitAsType(_id, name, generics) => { + write!(f, "impl {}", name)?; + if !generics.is_empty() { + let generics = vecmap(generics, ToString::to_string).join(", "); + write!(f, "<{generics}>")?; + } + Ok(()) } Type::Tuple(elements) => { let elements = vecmap(elements, ToString::to_string); @@ -728,14 +791,14 @@ impl std::fmt::Display for Type { } Type::Unit => write!(f, "()"), Type::Error => write!(f, "error"), - Type::NamedGeneric(binding, name) => match &*binding.0.borrow() { + Type::NamedGeneric(binding, name) => match &*binding.borrow() { TypeBinding::Bound(binding) => binding.fmt(f), TypeBinding::Unbound(_) if name.is_empty() => write!(f, "_"), TypeBinding::Unbound(_) => write!(f, "{name}"), }, Type::Constant(x) => x.fmt(f), Type::Forall(typevars, typ) => { - let typevars = vecmap(typevars, |(var, _)| var.to_string()); + let typevars = vecmap(typevars, |var| var.id().to_string()); write!(f, "forall {}. {}", typevars.join(" "), typ) } Type::Function(args, ret, env) => { @@ -795,7 +858,7 @@ impl Type { target_length: u64, bindings: &mut TypeBindings, ) -> Result<(), UnificationError> { - let target_id = match &*var.0.borrow() { + let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), TypeBinding::Unbound(id) => *id, }; @@ -814,7 +877,7 @@ impl Type { // A TypeVariable is less specific than a MaybeConstant, so we bind // to the other type variable instead. 
Type::TypeVariable(new_var, kind) => { - let borrow = new_var.0.borrow(); + let borrow = new_var.borrow(); match &*borrow { TypeBinding::Bound(typ) => { typ.try_bind_to_maybe_constant(var, target_length, bindings) @@ -862,7 +925,7 @@ impl Type { var: &TypeVariable, bindings: &mut TypeBindings, ) -> Result<(), UnificationError> { - let target_id = match &*var.0.borrow() { + let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), TypeBinding::Unbound(id) => *id, }; @@ -875,7 +938,7 @@ impl Type { Ok(()) } Type::TypeVariable(self_var, TypeVariableKind::IntegerOrField) => { - let borrow = self_var.0.borrow(); + let borrow = self_var.borrow(); match &*borrow { TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var, bindings), // Avoid infinitely recursive bindings @@ -887,7 +950,7 @@ impl Type { } } Type::TypeVariable(binding, TypeVariableKind::Normal) => { - let borrow = binding.0.borrow(); + let borrow = binding.borrow(); match &*borrow { TypeBinding::Bound(typ) => typ.try_bind_to_polymorphic_int(var, bindings), // Avoid infinitely recursive bindings @@ -917,7 +980,7 @@ impl Type { var: &TypeVariable, bindings: &mut TypeBindings, ) -> Result<(), UnificationError> { - let target_id = match &*var.0.borrow() { + let target_id = match &*var.borrow() { TypeBinding::Bound(_) => unreachable!(), TypeBinding::Unbound(id) => *id, }; @@ -945,7 +1008,7 @@ impl Type { fn get_inner_type_variable(&self) -> Option> { match self { - Type::TypeVariable(var, _) | Type::NamedGeneric(var, _) => Some(var.0.clone()), + Type::TypeVariable(var, _) | Type::NamedGeneric(var, _) => Some(var.1.clone()), _ => None, } } @@ -1041,9 +1104,9 @@ impl Type { } (NamedGeneric(binding, _), other) | (other, NamedGeneric(binding, _)) - if !binding.0.borrow().is_unbound() => + if !binding.borrow().is_unbound() => { - if let TypeBinding::Bound(link) = &*binding.0.borrow() { + if let TypeBinding::Bound(link) = &*binding.borrow() { link.try_unify(other, bindings) } else { 
unreachable!("If guard ensures binding is bound") @@ -1052,8 +1115,8 @@ impl Type { (NamedGeneric(binding_a, name_a), NamedGeneric(binding_b, name_b)) => { // Bound NamedGenerics are caught by the check above - assert!(binding_a.0.borrow().is_unbound()); - assert!(binding_b.0.borrow().is_unbound()); + assert!(binding_a.borrow().is_unbound()); + assert!(binding_b.borrow().is_unbound()); if name_a == name_b { Ok(()) @@ -1101,7 +1164,7 @@ impl Type { // bind to the given type or not. bind_variable: impl FnOnce(&mut TypeBindings) -> Result<(), UnificationError>, ) -> Result<(), UnificationError> { - match &*type_variable.0.borrow() { + match &*type_variable.borrow() { // If it is already bound, unify against what it is bound to TypeBinding::Bound(link) => link.try_unify(self, bindings), TypeBinding::Unbound(id) => { @@ -1231,6 +1294,29 @@ impl Type { } } + /// Instantiate this type with the given type bindings. + /// If any type variables which would be instantiated are contained in the + /// given type bindings instead, the value from the type bindings is used. + pub fn instantiate_with_bindings( + &self, + mut bindings: TypeBindings, + interner: &NodeInterner, + ) -> (Type, TypeBindings) { + match self { + Type::Forall(typevars, typ) => { + for var in typevars { + bindings + .entry(var.id()) + .or_insert_with(|| (var.clone(), interner.next_type_variable())); + } + + let instantiated = typ.force_substitute(&bindings); + (instantiated, bindings) + } + other => (other.clone(), bindings), + } + } + /// Instantiate this type, replacing any type variables it is quantified /// over with fresh type variables. If this type is not a Type::Forall, /// it is unchanged. 
@@ -1239,157 +1325,123 @@ impl Type { Type::Forall(typevars, typ) => { let replacements = typevars .iter() - .map(|(id, var)| { + .map(|var| { let new = interner.next_type_variable(); - (*id, (var.clone(), new)) + (var.id(), (var.clone(), new)) }) .collect(); - let instantiated = typ.substitute(&replacements); + let instantiated = typ.force_substitute(&replacements); (instantiated, replacements) } other => (other.clone(), HashMap::new()), } } - /// Replace each NamedGeneric (and TypeVariable) in this type with a fresh type variable - pub(crate) fn instantiate_type_variables( - &self, - interner: &NodeInterner, - ) -> (Type, TypeBindings) { - let mut type_variables = HashMap::new(); - self.find_all_unbound_type_variables(&mut type_variables); - - let substitutions = type_variables - .into_iter() - .map(|(id, type_var)| (id, (type_var, interner.next_type_variable()))) - .collect(); - - (self.substitute(&substitutions), substitutions) - } - - /// For each unbound type variable in the current type, add a type binding to the given list - /// to bind the unbound type variable to a fresh type variable. - fn find_all_unbound_type_variables( - &self, - type_variables: &mut HashMap, - ) { - match self { - Type::FieldElement - | Type::Integer(_, _) - | Type::Bool - | Type::Unit - | Type::TraitAsType(..) 
- | Type::Constant(_) - | Type::NotConstant - | Type::Error => (), - Type::Array(length, elem) => { - length.find_all_unbound_type_variables(type_variables); - elem.find_all_unbound_type_variables(type_variables); - } - Type::String(length) => length.find_all_unbound_type_variables(type_variables), - Type::FmtString(length, env) => { - length.find_all_unbound_type_variables(type_variables); - env.find_all_unbound_type_variables(type_variables); - } - Type::Struct(_, generics) => { - for generic in generics { - generic.find_all_unbound_type_variables(type_variables); - } - } - Type::Tuple(fields) => { - for field in fields { - field.find_all_unbound_type_variables(type_variables); - } - } - Type::Function(args, ret, env) => { - for arg in args { - arg.find_all_unbound_type_variables(type_variables); - } - ret.find_all_unbound_type_variables(type_variables); - env.find_all_unbound_type_variables(type_variables); - } - Type::MutableReference(elem) => { - elem.find_all_unbound_type_variables(type_variables); - } - Type::Forall(_, typ) => typ.find_all_unbound_type_variables(type_variables), - Type::TypeVariable(type_variable, _) | Type::NamedGeneric(type_variable, _) => { - match &*type_variable.0.borrow() { - TypeBinding::Bound(binding) => { - binding.find_all_unbound_type_variables(type_variables); - } - TypeBinding::Unbound(id) => { - if !type_variables.contains_key(id) { - type_variables.insert(*id, type_variable.clone()); - } - } - } - } - } - } - /// Substitute any type variables found within this type with the /// given bindings if found. If a type variable is not found within /// the given TypeBindings, it is unchanged. pub fn substitute(&self, type_bindings: &TypeBindings) -> Type { + self.substitute_helper(type_bindings, false) + } + + /// Forcibly substitute any type variables found within this type with the + /// given bindings if found. If a type variable is not found within + /// the given TypeBindings, it is unchanged. 
+ /// + /// Compared to `substitute`, this function will also substitute any type variables + /// from type_bindings, even if they are bound in `self`. Since this can undo previous + /// bindings, this function should be avoided unless necessary. Currently, it is only + /// needed when handling bindings between trait methods and their corresponding impl + /// method during monomorphization. + pub fn force_substitute(&self, type_bindings: &TypeBindings) -> Type { + self.substitute_helper(type_bindings, true) + } + + /// This helper function only differs in the additional parameter which, if set, + /// allows substitutions on already-bound type variables. This should be `false` + /// for most uses, but is currently needed during monomorphization when instantiating + /// trait functions to shed any previous bindings from recursive parent calls to the + /// same trait. + fn substitute_helper( + &self, + type_bindings: &TypeBindings, + substitute_bound_typevars: bool, + ) -> Type { if type_bindings.is_empty() { return self.clone(); } - let substitute_binding = |binding: &TypeVariable| match &*binding.0.borrow() { - TypeBinding::Bound(binding) => binding.substitute(type_bindings), - TypeBinding::Unbound(id) => match type_bindings.get(id) { - Some((_, binding)) => binding.clone(), - None => self.clone(), - }, + let substitute_binding = |binding: &TypeVariable| { + // Check the id first to allow substituting to + // type variables that have already been bound over. + // This is needed for monomorphizing trait impl methods. 
+ match type_bindings.get(&binding.0) { + Some((_, binding)) if substitute_bound_typevars => binding.clone(), + _ => match &*binding.borrow() { + TypeBinding::Bound(binding) => { + binding.substitute_helper(type_bindings, substitute_bound_typevars) + } + TypeBinding::Unbound(id) => match type_bindings.get(id) { + Some((_, binding)) => binding.clone(), + None => self.clone(), + }, + }, + } }; match self { Type::Array(size, element) => { - let size = Box::new(size.substitute(type_bindings)); - let element = Box::new(element.substitute(type_bindings)); - Type::Array(size, element) + let size = size.substitute_helper(type_bindings, substitute_bound_typevars); + let element = element.substitute_helper(type_bindings, substitute_bound_typevars); + Type::Array(Box::new(size), Box::new(element)) } Type::String(size) => { - let size = Box::new(size.substitute(type_bindings)); - Type::String(size) + let size = size.substitute_helper(type_bindings, substitute_bound_typevars); + Type::String(Box::new(size)) } Type::FmtString(size, fields) => { - let size = Box::new(size.substitute(type_bindings)); - let fields = Box::new(fields.substitute(type_bindings)); - Type::FmtString(size, fields) + let size = size.substitute_helper(type_bindings, substitute_bound_typevars); + let fields = fields.substitute_helper(type_bindings, substitute_bound_typevars); + Type::FmtString(Box::new(size), Box::new(fields)) } Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { substitute_binding(binding) } - // Do not substitute fields, it can lead to infinite recursion + // Do not substitute_helper fields, it ca, substitute_bound_typevarsn lead to infinite recursion // and we should not match fields when type checking anyway. 
Type::Struct(fields, args) => { - let args = vecmap(args, |arg| arg.substitute(type_bindings)); + let args = vecmap(args, |arg| { + arg.substitute_helper(type_bindings, substitute_bound_typevars) + }); Type::Struct(fields.clone(), args) } Type::Tuple(fields) => { - let fields = vecmap(fields, |field| field.substitute(type_bindings)); + let fields = vecmap(fields, |field| { + field.substitute_helper(type_bindings, substitute_bound_typevars) + }); Type::Tuple(fields) } Type::Forall(typevars, typ) => { - // Trying to substitute a variable defined within a nested Forall + // Trying to substitute_helper a variable de, substitute_bound_typevarsfined within a nested Forall // is usually impossible and indicative of an error in the type checker somewhere. - for (var, _) in typevars { - assert!(!type_bindings.contains_key(var)); + for var in typevars { + assert!(!type_bindings.contains_key(&var.id())); } - let typ = Box::new(typ.substitute(type_bindings)); + let typ = Box::new(typ.substitute_helper(type_bindings, substitute_bound_typevars)); Type::Forall(typevars.clone(), typ) } Type::Function(args, ret, env) => { - let args = vecmap(args, |arg| arg.substitute(type_bindings)); - let ret = Box::new(ret.substitute(type_bindings)); - let env = Box::new(env.substitute(type_bindings)); + let args = vecmap(args, |arg| { + arg.substitute_helper(type_bindings, substitute_bound_typevars) + }); + let ret = Box::new(ret.substitute_helper(type_bindings, substitute_bound_typevars)); + let env = Box::new(env.substitute_helper(type_bindings, substitute_bound_typevars)); Type::Function(args, ret, env) } - Type::MutableReference(element) => { - Type::MutableReference(Box::new(element.substitute(type_bindings))) - } + Type::MutableReference(element) => Type::MutableReference(Box::new( + element.substitute_helper(type_bindings, substitute_bound_typevars), + )), Type::FieldElement | Type::Integer(_, _) @@ -1415,13 +1467,13 @@ impl Type { Type::Struct(_, generic_args) => 
generic_args.iter().any(|arg| arg.occurs(target_id)), Type::Tuple(fields) => fields.iter().any(|field| field.occurs(target_id)), Type::NamedGeneric(binding, _) | Type::TypeVariable(binding, _) => { - match &*binding.0.borrow() { + match &*binding.borrow() { TypeBinding::Bound(binding) => binding.occurs(target_id), TypeBinding::Unbound(id) => *id == target_id, } } Type::Forall(typevars, typ) => { - !typevars.iter().any(|(id, _)| *id == target_id) && typ.occurs(target_id) + !typevars.iter().any(|var| var.id() == target_id) && typ.occurs(target_id) } Type::Function(args, ret, env) => { args.iter().any(|arg| arg.occurs(target_id)) @@ -1465,7 +1517,7 @@ impl Type { } Tuple(args) => Tuple(vecmap(args, |arg| arg.follow_bindings())), TypeVariable(var, _) | NamedGeneric(var, _) => { - if let TypeBinding::Bound(typ) = &*var.0.borrow() { + if let TypeBinding::Bound(typ) = &*var.borrow() { return typ.follow_bindings(); } self.clone() @@ -1492,6 +1544,10 @@ impl Type { | NotConstant => self.clone(), } } + + pub fn from_generics(generics: &Generics) -> Vec { + vecmap(generics, |var| Type::TypeVariable(var.clone(), TypeVariableKind::Normal)) + } } /// Wraps a given `expression` in `expression.as_slice()` @@ -1507,7 +1563,7 @@ fn convert_array_expression_to_slice( let as_slice_id = interner.function_definition_id(as_slice_method); let location = interner.expr_location(&expression); - let as_slice = HirExpression::Ident(HirIdent { location, id: as_slice_id }); + let as_slice = HirExpression::Ident(HirIdent::non_trait_method(as_slice_id, location)); let func = interner.push_expr(as_slice); let arguments = vec![expression]; @@ -1570,7 +1626,7 @@ impl From<&Type> for PrintableType { Signedness::Signed => PrintableType::SignedInteger { width: *bit_width }, }, Type::TypeVariable(binding, TypeVariableKind::IntegerOrField) => { - match &*binding.0.borrow() { + match &*binding.borrow() { TypeBinding::Bound(typ) => typ.into(), TypeBinding::Unbound(_) => Type::default_int_type().into(), } @@ 
-1580,24 +1636,28 @@ impl From<&Type> for PrintableType { let size = size.evaluate_to_u64().expect("Cannot print variable sized strings"); PrintableType::String { length: size } } - Type::FmtString(_, _) => PrintableType::FmtString {}, - Type::Error => PrintableType::Error {}, - Type::Unit => PrintableType::Unit {}, - Type::Constant(_) => PrintableType::Constant {}, + Type::FmtString(_, _) => unreachable!("format strings cannot be printed"), + Type::Error => unreachable!(), + Type::Unit => PrintableType::Unit, + Type::Constant(_) => unreachable!(), Type::Struct(def, ref args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); let fields = vecmap(fields, |(name, typ)| (name, typ.into())); PrintableType::Struct { fields, name: struct_type.name.to_string() } } - Type::TraitAsType(_, _) => PrintableType::TraitAsType {}, + Type::TraitAsType(_, _, _) => unreachable!(), Type::Tuple(types) => PrintableType::Tuple { types: vecmap(types, |typ| typ.into()) }, - Type::TypeVariable(_, _) => PrintableType::TypeVariable {}, - Type::NamedGeneric(..) => PrintableType::NamedGeneric {}, - Type::Forall(..) => PrintableType::Forall {}, - Type::Function(_, _, _) => PrintableType::Function, - Type::MutableReference(_) => PrintableType::MutableReference {}, - Type::NotConstant => PrintableType::NotConstant {}, + Type::TypeVariable(_, _) => unreachable!(), + Type::NamedGeneric(..) => unreachable!(), + Type::Forall(..) 
=> unreachable!(), + Type::Function(_, _, env) => { + PrintableType::Function { env: Box::new(env.as_ref().into()) } + } + Type::MutableReference(typ) => { + PrintableType::MutableReference { typ: Box::new(typ.as_ref().into()) } + } + Type::NotConstant => unreachable!(), } } } diff --git a/compiler/noirc_frontend/src/lexer/token.rs b/compiler/noirc_frontend/src/lexer/token.rs index e6542c643ad..ab131ccd880 100644 --- a/compiler/noirc_frontend/src/lexer/token.rs +++ b/compiler/noirc_frontend/src/lexer/token.rs @@ -510,6 +510,7 @@ impl Attribute { Attribute::Secondary(SecondaryAttribute::ContractLibraryMethod) } ["event"] => Attribute::Secondary(SecondaryAttribute::Event), + ["export"] => Attribute::Secondary(SecondaryAttribute::Export), ["deprecated", name] => { if !name.starts_with('"') && !name.ends_with('"') { return Err(LexerErrorKind::MalformedFuncAttribute { @@ -588,6 +589,7 @@ pub enum SecondaryAttribute { // the entry point. ContractLibraryMethod, Event, + Export, Field(String), Custom(String), } @@ -602,6 +604,7 @@ impl fmt::Display for SecondaryAttribute { SecondaryAttribute::Custom(ref k) => write!(f, "#[{k}]"), SecondaryAttribute::ContractLibraryMethod => write!(f, "#[contract_library_method]"), SecondaryAttribute::Event => write!(f, "#[event]"), + SecondaryAttribute::Export => write!(f, "#[export]"), SecondaryAttribute::Field(ref k) => write!(f, "#[field({k})]"), } } @@ -625,7 +628,7 @@ impl AsRef for SecondaryAttribute { SecondaryAttribute::Deprecated(None) => "", SecondaryAttribute::Custom(string) | SecondaryAttribute::Field(string) => string, SecondaryAttribute::ContractLibraryMethod => "", - SecondaryAttribute::Event => "", + SecondaryAttribute::Event | SecondaryAttribute::Export => "", } } } diff --git a/compiler/noirc_frontend/src/lib.rs b/compiler/noirc_frontend/src/lib.rs index 976af5c9325..2cb05184497 100644 --- a/compiler/noirc_frontend/src/lib.rs +++ b/compiler/noirc_frontend/src/lib.rs @@ -17,6 +17,7 @@ pub mod lexer; pub mod 
monomorphization; pub mod node_interner; pub mod parser; +pub mod resolve_locations; pub mod hir; pub mod hir_def; diff --git a/compiler/noirc_frontend/src/monomorphization/ast.rs b/compiler/noirc_frontend/src/monomorphization/ast.rs index bc23ed3f5ba..71a9e04166c 100644 --- a/compiler/noirc_frontend/src/monomorphization/ast.rs +++ b/compiler/noirc_frontend/src/monomorphization/ast.rs @@ -356,7 +356,7 @@ impl std::fmt::Display for Type { }; write!(f, "fn({}) -> {}{}", args.join(", "), ret, closure_env_text) } - Type::Slice(element) => write!(f, "[{element}"), + Type::Slice(element) => write!(f, "[{element}]"), Type::MutableReference(element) => write!(f, "&mut {element}"), } } diff --git a/compiler/noirc_frontend/src/monomorphization/mod.rs b/compiler/noirc_frontend/src/monomorphization/mod.rs index 69968c3cbc0..308483d405d 100644 --- a/compiler/noirc_frontend/src/monomorphization/mod.rs +++ b/compiler/noirc_frontend/src/monomorphization/mod.rs @@ -26,8 +26,8 @@ use crate::{ }, node_interner::{self, DefinitionKind, NodeInterner, StmtId, TraitImplKind, TraitMethodId}, token::FunctionAttribute, - ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariableKind, - Visibility, + ContractFunctionType, FunctionKind, Type, TypeBinding, TypeBindings, TypeVariable, + TypeVariableKind, UnaryOp, Visibility, }; use self::ast::{Definition, FuncId, Function, LocalId, Program}; @@ -62,8 +62,9 @@ struct Monomorphizer<'interner> { /// confuse users. 
locals: HashMap, - /// Queue of functions to monomorphize next - queue: VecDeque<(node_interner::FuncId, FuncId, TypeBindings)>, + /// Queue of functions to monomorphize next each item in the queue is a tuple of: + /// (old_id, new_monomorphized_id, any type bindings to apply, the trait method if old_id is from a trait impl) + queue: VecDeque<(node_interner::FuncId, FuncId, TypeBindings, Option)>, /// When a function finishes being monomorphized, the monomorphized ast::Function is /// stored here along with its FuncId. @@ -98,6 +99,7 @@ type HirType = crate::Type; /// Note that there is no requirement on the `main` function that can be passed into /// this function. Typically, this is the function named "main" in the source project, /// but it can also be, for example, an arbitrary test function for running `nargo test`. +#[tracing::instrument(level = "trace", skip(main, interner))] pub fn monomorphize(main: node_interner::FuncId, interner: &mut NodeInterner) -> Program { monomorphize_option_debug(main, interner, None) } @@ -123,24 +125,27 @@ fn monomorphize_option_debug( }; while !monomorphizer.queue.is_empty() { - let (next_fn_id, new_id, bindings) = monomorphizer.queue.pop_front().unwrap(); + let (next_fn_id, new_id, bindings, trait_method) = monomorphizer.queue.pop_front().unwrap(); monomorphizer.locals.clear(); perform_instantiation_bindings(&bindings); + let impl_bindings = monomorphizer.perform_impl_bindings(trait_method, next_fn_id); monomorphizer.function(next_fn_id, new_id); + undo_instantiation_bindings(impl_bindings); undo_instantiation_bindings(bindings); } let functions = vecmap(monomorphizer.finished_functions, |(_, f)| f); let FuncMeta { return_distinctness, return_visibility, .. 
} = monomorphizer.interner.function_meta(&main); + Program::new( functions, function_sig, - return_distinctness, + *return_distinctness, monomorphizer.return_location, monomorphizer.debug_types.into(), - return_visibility, + *return_visibility, ) } @@ -183,6 +188,7 @@ impl<'interner> Monomorphizer<'interner> { id: node_interner::FuncId, expr_id: node_interner::ExprId, typ: &HirType, + trait_method: Option, ) -> Definition { let typ = typ.follow_bindings(); match self.globals.get(&id).and_then(|inner_map| inner_map.get(&typ)) { @@ -206,7 +212,7 @@ impl<'interner> Monomorphizer<'interner> { Definition::Builtin(opcode) } FunctionKind::Normal => { - let id = self.queue_function(id, expr_id, typ); + let id = self.queue_function(id, expr_id, typ, trait_method); Definition::Function(id) } FunctionKind::Oracle => { @@ -246,7 +252,7 @@ impl<'interner> Monomorphizer<'interner> { }, ); let main_meta = self.interner.function_meta(&main_id); - main_meta.into_function_signature() + main_meta.function_signature() } fn compile_main_debug( @@ -272,7 +278,7 @@ impl<'interner> Monomorphizer<'interner> { let body_expr_id = *self.interner.function(&f).as_expr(); let body_return_type = self.interner.id_type(body_expr_id); let return_type = self.convert_type(match meta.return_type() { - Type::TraitAsType(_, _) => &body_return_type, + Type::TraitAsType(_, _, _) => &body_return_type, _ => meta.return_type(), }); let unconstrained = modifiers.is_unconstrained @@ -281,10 +287,11 @@ impl<'interner> Monomorphizer<'interner> { }; let function = { - let parameters = self.parameters(meta_parameters).clone(); + let parameters = self.parameters(&meta_parameters).clone(); let body = self.expr(body_expr_id).clone(); ast::Function { id, name, parameters, body, return_type, unconstrained } }; + self.push_function(id, function); } @@ -295,17 +302,17 @@ impl<'interner> Monomorphizer<'interner> { /// Monomorphize each parameter, expanding tuple/struct patterns into multiple parameters /// and binding any 
generic types found. - fn parameters(&mut self, params: Parameters) -> Vec<(ast::LocalId, bool, String, ast::Type)> { + fn parameters(&mut self, params: &Parameters) -> Vec<(ast::LocalId, bool, String, ast::Type)> { let mut new_params = Vec::with_capacity(params.len()); - for parameter in params { - self.parameter(parameter.0, ¶meter.1, &mut new_params); + for (parameter, typ, _) in ¶ms.0 { + self.parameter(parameter, typ, &mut new_params); } new_params } fn parameter( &mut self, - param: HirPattern, + param: &HirPattern, typ: &HirType, new_params: &mut Vec<(ast::LocalId, bool, String, ast::Type)>, ) { @@ -317,11 +324,11 @@ impl<'interner> Monomorphizer<'interner> { new_params.push((new_id, definition.mutable, name, self.convert_type(typ))); self.define_local(ident.id, new_id); } - HirPattern::Mutable(pattern, _) => self.parameter(*pattern, typ, new_params), + HirPattern::Mutable(pattern, _) => self.parameter(pattern, typ, new_params), HirPattern::Tuple(fields, _) => { let tuple_field_types = unwrap_tuple_type(typ); - for (field, typ) in fields.into_iter().zip(tuple_field_types) { + for (field, typ) in fields.iter().zip(tuple_field_types) { self.parameter(field, &typ, new_params); } } @@ -329,7 +336,8 @@ impl<'interner> Monomorphizer<'interner> { let struct_field_types = unwrap_struct_type(typ); assert_eq!(struct_field_types.len(), fields.len()); - let mut fields = btree_map(fields, |(name, field)| (name.0.contents, field)); + let mut fields = + btree_map(fields, |(name, field)| (name.0.contents.clone(), field)); // Iterate over `struct_field_types` since `unwrap_struct_type` will always // return the fields in the order defined by the struct type. 
@@ -398,11 +406,37 @@ impl<'interner> Monomorphizer<'interner> { } HirExpression::Infix(infix) => { - let lhs = Box::new(self.expr(infix.lhs)); - let rhs = Box::new(self.expr(infix.rhs)); + let lhs = self.expr(infix.lhs); + let rhs = self.expr(infix.rhs); let operator = infix.operator.kind; let location = self.interner.expr_location(&expr); - ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + + if self.interner.get_selected_impl_for_expression(expr).is_some() { + // If an impl was selected for this infix operator, replace it + // with a method call to the appropriate trait impl method. + let lhs_type = self.interner.id_type(infix.lhs); + let args = vec![lhs_type.clone(), lhs_type]; + + // If this is a comparison operator, the result is a boolean but + // the actual method call returns an Ordering + use crate::BinaryOpKind::*; + let ret = if matches!(operator, Less | LessEqual | Greater | GreaterEqual) { + self.interner.ordering_type() + } else { + self.interner.id_type(expr) + }; + + let env = Box::new(Type::Unit); + let function_type = Type::Function(args, Box::new(ret.clone()), env); + + let method = infix.trait_method_id; + let func = self.resolve_trait_method_reference(expr, function_type, method); + self.create_operator_impl_call(func, lhs, infix.operator, rhs, ret, location) + } else { + let lhs = Box::new(lhs); + let rhs = Box::new(rhs); + ast::Expression::Binary(ast::Binary { lhs, rhs, operator, location }) + } } HirExpression::Index(index) => self.index(expr, index), @@ -441,16 +475,6 @@ impl<'interner> Monomorphizer<'interner> { HirExpression::Lambda(lambda) => self.lambda(lambda, expr), - HirExpression::TraitMethodReference(method) => { - if let Type::Function(_, _, _) = self.interner.id_type(expr) { - self.resolve_trait_method_reference(expr, method) - } else { - unreachable!( - "Calling a non-function, this should've been caught in typechecking" - ); - } - } - HirExpression::MethodCall(hir_method_call) => { 
unreachable!("Encountered HirExpression::MethodCall during monomorphization {hir_method_call:?}") } @@ -697,6 +721,12 @@ impl<'interner> Monomorphizer<'interner> { } fn ident(&mut self, ident: HirIdent, expr_id: node_interner::ExprId) -> ast::Expression { + let typ = self.interner.id_type(expr_id); + + if let ImplKind::TraitMethod(method, _, _) = ident.impl_kind { + return self.resolve_trait_method_reference(expr_id, typ, method); + } + let definition = self.interner.definition(ident.id); match &definition.kind { DefinitionKind::Function(func_id) => { @@ -704,7 +734,7 @@ impl<'interner> Monomorphizer<'interner> { let location = Some(ident.location); let name = definition.name.clone(); let typ = self.interner.id_type(expr_id); - let definition = self.lookup_function(*func_id, expr_id, &typ); + let definition = self.lookup_function(*func_id, expr_id, &typ, None); let typ = self.convert_type(&typ); let ident = ast::Ident { location, mutable, definition, name, typ: typ.clone() }; let ident_expression = ast::Expression::Ident(ident); @@ -868,22 +898,25 @@ impl<'interner> Monomorphizer<'interner> { fn resolve_trait_method_reference( &mut self, expr_id: node_interner::ExprId, + function_type: HirType, method: TraitMethodId, ) -> ast::Expression { - let function_type = self.interner.id_type(expr_id); - let trait_impl = self .interner - .get_selected_impl_for_ident(expr_id) + .get_selected_impl_for_expression(expr_id) .expect("ICE: missing trait impl - should be caught during type checking"); - let hir_func_id = match trait_impl { + let func_id = match trait_impl { node_interner::TraitImplKind::Normal(impl_id) => { self.interner.get_trait_implementation(impl_id).borrow().methods [method.method_index] } - node_interner::TraitImplKind::Assumed { object_type } => { - match self.interner.lookup_trait_implementation(&object_type, method.trait_id) { + node_interner::TraitImplKind::Assumed { object_type, trait_generics } => { + match self.interner.lookup_trait_implementation( + 
&object_type, + method.trait_id, + &trait_generics, + ) { Ok(TraitImplKind::Normal(impl_id)) => { self.interner.get_trait_implementation(impl_id).borrow().methods [method.method_index] @@ -905,14 +938,12 @@ impl<'interner> Monomorphizer<'interner> { } }; - let func_def = self.lookup_function(hir_func_id, expr_id, &function_type); - let func_id = match func_def { + let func_id = match self.lookup_function(func_id, expr_id, &function_type, Some(method)) { Definition::Function(func_id) => func_id, _ => unreachable!(), }; let the_trait = self.interner.get_trait(method.trait_id); - ast::Expression::Ident(ast::Ident { definition: Definition::Function(func_id), mutable: false, @@ -931,6 +962,133 @@ impl<'interner> Monomorphizer<'interner> { expr_id } + /// Update instrumentation code inserted on variable assignment. We need to + /// register the variable instance, its type and replace the temporary ID + /// (fe_var_id) with the ID of the registration. Multiple registrations of + /// the same variable are possible if using generic functions, hence the + /// temporary ID created when injecting the instrumentation code can map to + /// multiple IDs at runtime. + fn patch_debug_var_assign( + &mut self, + call: &HirCallExpression, + arguments: &mut [ast::Expression], + ) { + let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + let Some(HirExpression::Literal(HirLiteral::Integer(fe_var_id, _))) = hir_arguments.get(0) else { + unreachable!("Missing FE var ID in __debug_var_assign call"); + }; + let Some(HirExpression::Ident(HirIdent { id, .. 
})) = hir_arguments.get(1) else { + unreachable!("Missing value identifier in __debug_var_assign call"); + }; + + // update variable assignments + let var_def = self.interner.definition(*id); + let var_type = self.interner.id_type(call.arguments[1]); + let fe_var_id = fe_var_id.to_u128() as u32; + let var_id = if var_def.name != "__debug_expr" { + self.debug_types.insert_var(fe_var_id, &var_def.name, var_type) + } else { + self.debug_types.get_var_id(fe_var_id).unwrap() + }; + let interned_var_id = self.intern_var_id(var_id, &call.location); + arguments[0] = self.expr(interned_var_id); + } + + /// Update instrumentation code for a variable being dropped out of scope. + /// Given the fe_var_id we search for the last assigned runtime variable ID + /// and replace it instead. + fn patch_debug_var_drop( + &mut self, + call: &HirCallExpression, + arguments: &mut [ast::Expression], + ) { + let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + let Some(HirExpression::Literal(HirLiteral::Integer(fe_var_id, _))) = hir_arguments.get(0) else { + unreachable!("Missing FE var ID in __debug_var_drop call"); + }; + // update variable drops (ie. when the var goes out of scope) + let fe_var_id = fe_var_id.to_u128() as u32; + if let Some(var_id) = self.debug_types.get_var_id(fe_var_id) { + let interned_var_id = self.intern_var_id(var_id, &call.location); + arguments[0] = self.expr(interned_var_id); + } + } + + /// Update instrumentation code inserted when assigning to a member of an + /// existing variable. Same as above for replacing the fe_var_id, but also + /// we need to resolve the path and the type of the member being assigned. + /// For this last part, we need to resolve the mapping from field names in + /// structs to positions in the runtime tuple, since all structs are + /// replaced by tuples during compilation. 
+ fn patch_debug_member_assign( + &mut self, + call: &HirCallExpression, + arguments: &mut [ast::Expression], + arity: usize, + ) { + let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); + let Some(HirExpression::Literal(HirLiteral::Integer(fe_var_id, _))) = hir_arguments.get(0) else { + unreachable!("Missing FE var ID in __debug_member_assign call"); + }; + let Some(HirExpression::Ident(HirIdent { id, .. })) = hir_arguments.get(1) else { + unreachable!("Missing value identifier in __debug_member_assign call"); + }; + // update variable member assignments + let var_def_name = self.interner.definition(*id).name.clone(); + let var_type = self.interner.id_type(call.arguments[2]); + let fe_var_id = fe_var_id.to_u128() as u32; + + let mut cursor_type = self + .debug_types + .get_type(fe_var_id) + .unwrap_or_else(|| panic!("type not found for fe_var_id={fe_var_id}")) + .clone(); + for i in 0..arity { + if let Some(HirExpression::Literal(HirLiteral::Integer(fe_i, i_neg))) = + hir_arguments.get(2 + i) + { + let mut index = fe_i.to_i128(); + if *i_neg { + index = -index; + } + if index < 0 { + let index = index.unsigned_abs(); + let field_name = self + .debug_field_names + .get(&(index as u32)) + .unwrap_or_else(|| panic!("field name not available for {i:?}")); + let field_i = (get_field(&cursor_type, field_name) + .unwrap_or_else(|| panic!("failed to find field_name: {field_name}")) + as i128) + .unsigned_abs(); + cursor_type = next_type(&cursor_type, field_i as usize); + let index_id = self.interner.push_expr(HirExpression::Literal( + HirLiteral::Integer(field_i.into(), false), + )); + self.interner.push_expr_type(&index_id, Type::FieldElement); + self.interner.push_expr_location( + index_id, + call.location.span, + call.location.file, + ); + arguments[2 + i] = self.expr(index_id); + } else { + cursor_type = next_type(&cursor_type, 0); + } + } else { + cursor_type = next_type(&cursor_type, 0); + } + } + + let var_id = if &var_def_name != 
"__debug_expr" { + self.debug_types.insert_var(fe_var_id, &var_def_name, var_type) + } else { + self.debug_types.get_var_id(fe_var_id).unwrap() + }; + let interned_var_id = self.intern_var_id(var_id, &call.location); + arguments[0] = self.expr(interned_var_id); + } + fn function_call( &mut self, call: HirCallExpression, @@ -939,102 +1097,19 @@ impl<'interner> Monomorphizer<'interner> { let original_func = Box::new(self.expr(call.func)); let mut arguments = vecmap(&call.arguments, |id| self.expr(*id)); let hir_arguments = vecmap(&call.arguments, |id| self.interner.expression(id)); - if let ast::Expression::Ident(ast::Ident { name, .. }) = original_func.as_ref() { - if let ( - Some(HirExpression::Literal(HirLiteral::Integer(fe_var_id, _))), - Some(HirExpression::Ident(HirIdent { id, .. })), - true, - ) = (hir_arguments.get(0), hir_arguments.get(1), name == "__debug_var_assign") - { - // update variable assignments - let var_def = self.interner.definition(*id); - let var_type = self.interner.id_type(call.arguments[1]); - let fe_var_id = fe_var_id.to_u128() as u32; - let var_id = if var_def.name != "__debug_expr" { - self.debug_types.insert_var(fe_var_id, &var_def.name, var_type) - } else { - self.debug_types.get_var_id(fe_var_id).unwrap() - }; - let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[0] = self.expr(interned_var_id); - } else if let (Some(HirExpression::Literal(HirLiteral::Integer(fe_var_id, _))), true) = - (hir_arguments.get(0), name == "__debug_var_drop") - { - // update variable drops (ie. when the var goes out of scope) - let fe_var_id = fe_var_id.to_u128() as u32; - if let Some(var_id) = self.debug_types.get_var_id(fe_var_id) { - let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[0] = self.expr(interned_var_id); - } - } else if let ( - Some(HirExpression::Literal(HirLiteral::Integer(fe_var_id, _))), - Some(HirExpression::Ident(HirIdent { id, .. 
})), - true, - ) = ( - hir_arguments.get(0), - hir_arguments.get(1), - name.starts_with(DEBUG_MEMBER_ASSIGN_PREFIX), - ) { - // update variable member assignments - let var_def_name = self.interner.definition(*id).name.clone(); - let var_type = self.interner.id_type(call.arguments[2]); - let fe_var_id = fe_var_id.to_u128() as u32; - let arity = name[DEBUG_MEMBER_ASSIGN_PREFIX.len()..] - .parse::() - .expect("failed to parse member assign arity"); - - let mut cursor_type = self - .debug_types - .get_type(fe_var_id) - .unwrap_or_else(|| panic!("type not found for fe_var_id={fe_var_id}")) - .clone(); - for i in 0..arity { - if let Some(HirExpression::Literal(HirLiteral::Integer(fe_i, i_neg))) = - hir_arguments.get(2 + i) - { - let mut index = fe_i.to_i128(); - if *i_neg { - index = -index; - } - if index < 0 { - let index = index.unsigned_abs(); - let field_name = self - .debug_field_names - .get(&(index as u32)) - .unwrap_or_else(|| panic!("field name not available for {i:?}")); - let field_i = - (get_field(&cursor_type, field_name).unwrap_or_else(|| { - panic!("failed to find field_name: {field_name}") - }) as i128) - .unsigned_abs(); - cursor_type = next_type(&cursor_type, field_i as usize); - let index_id = self.interner.push_expr(HirExpression::Literal( - HirLiteral::Integer(field_i.into(), false), - )); - self.interner.push_expr_type(&index_id, Type::FieldElement); - self.interner.push_expr_location( - index_id, - call.location.span, - call.location.file, - ); - arguments[2 + i] = self.expr(index_id); - } else { - cursor_type = next_type(&cursor_type, 0); - } - } else { - cursor_type = next_type(&cursor_type, 0); - } - } - let var_id = if &var_def_name != "__debug_expr" { - self.debug_types.insert_var(fe_var_id, &var_def_name, var_type) - } else { - self.debug_types.get_var_id(fe_var_id).unwrap() - }; - let interned_var_id = self.intern_var_id(var_id, &call.location); - arguments[0] = self.expr(interned_var_id); + // patch instrumentation code inserted for 
debugging + if let ast::Expression::Ident(ast::Ident { name, .. }) = original_func.as_ref() { + if name == "__debug_var_assign" { + self.patch_debug_var_assign(&call, &mut arguments); + } else if name == "__debug_var_drop" { + self.patch_debug_var_drop(&call, &mut arguments); + } else if let Some(arity) = name.strip_prefix(DEBUG_MEMBER_ASSIGN_PREFIX) { + let arity = arity.parse::().expect("failed to parse member assign arity"); + self.patch_debug_member_assign(&call, &mut arguments, arity); } } + let return_type = self.interner.id_type(id); let return_type = self.convert_type(&return_type); @@ -1100,7 +1175,7 @@ impl<'interner> Monomorphizer<'interner> { } /// Adds a function argument that contains type metadata that is required to tell - /// `println` how to convert values passed to an foreign call back to a human-readable string. + /// `println` how to convert values passed to an foreign call back to a human-readable string. /// The values passed to an foreign call will be a simple list of field elements, /// thus requiring extra metadata to correctly decode this list of elements. /// @@ -1147,11 +1222,16 @@ impl<'interner> Monomorphizer<'interner> { } fn append_printable_type_info_inner(typ: &Type, arguments: &mut Vec) { + // Disallow printing slices and mutable references for consistency, + // since they cannot be passed from ACIR into Brillig if let HirType::Array(size, _) = typ { if let HirType::NotConstant = **size { unreachable!("println does not support slices. 
Convert the slice to an array before passing it to println"); } + } else if matches!(typ, HirType::MutableReference(_)) { + unreachable!("println does not support mutable references."); } + let printable_type: PrintableType = typ.into(); let abi_as_string = serde_json::to_string(&printable_type) .expect("ICE: expected PrintableType to serialize"); @@ -1232,14 +1312,14 @@ impl<'interner> Monomorphizer<'interner> { id: node_interner::FuncId, expr_id: node_interner::ExprId, function_type: HirType, + trait_method: Option, ) -> FuncId { let new_id = self.next_function_id(); - self.define_global(id, function_type, new_id); + self.define_global(id, function_type.clone(), new_id); let bindings = self.interner.get_instantiation_bindings(expr_id); let bindings = self.follow_bindings(bindings); - - self.queue.push_back((id, new_id, bindings)); + self.queue.push_back((id, new_id, bindings, trait_method)); new_id } @@ -1309,7 +1389,7 @@ impl<'interner> Monomorphizer<'interner> { let parameters = vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into(); - let parameters = self.parameters(parameters); + let parameters = self.parameters(¶meters); let body = self.expr(lambda.body); let id = self.next_function_id(); @@ -1360,7 +1440,7 @@ impl<'interner> Monomorphizer<'interner> { let parameters = vecmap(lambda.parameters, |(pattern, typ)| (pattern, typ, Visibility::Private)).into(); - let mut converted_parameters = self.parameters(parameters); + let mut converted_parameters = self.parameters(¶meters); let id = self.next_function_id(); let name = lambda_name.to_owned(); @@ -1564,6 +1644,110 @@ impl<'interner> Monomorphizer<'interner> { ), }) } + + /// Call an operator overloading method for the given operator. + /// This function handles the special cases some operators have which don't map + /// 1 to 1 onto their operator function. 
For example: != requires a negation on + /// the result of its `eq` method, and the comparison operators each require a + /// conversion from the `Ordering` result to a boolean. + fn create_operator_impl_call( + &self, + func: ast::Expression, + lhs: ast::Expression, + operator: HirBinaryOp, + rhs: ast::Expression, + ret: Type, + location: Location, + ) -> ast::Expression { + let arguments = vec![lhs, rhs]; + let func = Box::new(func); + let return_type = self.convert_type(&ret); + + let mut result = + ast::Expression::Call(ast::Call { func, arguments, return_type, location }); + + use crate::BinaryOpKind::*; + match operator.kind { + // Negate the result of the == operation + NotEqual => { + result = ast::Expression::Unary(ast::Unary { + operator: UnaryOp::Not, + rhs: Box::new(result), + result_type: ast::Type::Bool, + location, + }); + } + // All the comparison operators require special handling since their `cmp` method + // returns an `Ordering` rather than a boolean value. + // + // (a < b) => a.cmp(b) == Ordering::Less + // (a <= b) => a.cmp(b) != Ordering::Greater + // (a > b) => a.cmp(b) == Ordering::Greater + // (a >= b) => a.cmp(b) != Ordering::Less + Less | LessEqual | Greater | GreaterEqual => { + // Comparing an Ordering directly to a field value in this way takes advantage + // of the fact the Ordering struct contains a single Field type, and our SSA + // pass will automatically unpack tuple values. 
+ let ordering_value = if matches!(operator.kind, Less | GreaterEqual) { + FieldElement::zero() // Ordering::Less + } else { + 2u128.into() // Ordering::Greater + }; + + let operator = + if matches!(operator.kind, Less | Greater) { Equal } else { NotEqual }; + + let int_value = ast::Literal::Integer(ordering_value, ast::Type::Field, location); + let rhs = Box::new(ast::Expression::Literal(int_value)); + let lhs = Box::new(ast::Expression::ExtractTupleField(Box::new(result), 0)); + + result = ast::Expression::Binary(ast::Binary { lhs, operator, rhs, location }); + } + _ => (), + } + + result + } + + /// Call sites are instantiated against the trait method, but when an impl is later selected, + /// the corresponding method in the impl will have a different set of generics. `perform_impl_bindings` + /// is needed to apply the generics from the trait method to the impl method. Without this, + /// static method references to generic impls (e.g. `Eq::eq` for `[T; N]`) will fail to re-apply + /// the correct type bindings during monomorphization. + fn perform_impl_bindings( + &self, + trait_method: Option, + impl_method: node_interner::FuncId, + ) -> TypeBindings { + let mut bindings = TypeBindings::new(); + + if let Some(trait_method) = trait_method { + let the_trait = self.interner.get_trait(trait_method.trait_id); + + let trait_method_type = the_trait.methods[trait_method.method_index].typ.as_monotype(); + + // Make each NamedGeneric in this type bindable by replacing it with a TypeVariable + // with the same internal id and binding. 
+ let (generics, impl_method_type) = + self.interner.function_meta(&impl_method).typ.unwrap_forall(); + + let replace_type_variable = |var: &TypeVariable| { + (var.id(), (var.clone(), Type::TypeVariable(var.clone(), TypeVariableKind::Normal))) + }; + + // Replace each NamedGeneric with a TypeVariable containing the same internal type variable + let type_bindings = generics.iter().map(replace_type_variable).collect(); + let impl_method_type = impl_method_type.force_substitute(&type_bindings); + + trait_method_type.try_unify(&impl_method_type, &mut bindings).unwrap_or_else(|_| { + unreachable!("Impl method type {} does not unify with trait method type {} during monomorphization", impl_method_type, trait_method_type) + }); + + perform_instantiation_bindings(&bindings); + } + + bindings + } } fn unwrap_tuple_type(typ: &HirType) -> Vec { diff --git a/compiler/noirc_frontend/src/node_interner.rs b/compiler/noirc_frontend/src/node_interner.rs index 236f1e0b513..b856b54f6ca 100644 --- a/compiler/noirc_frontend/src/node_interner.rs +++ b/compiler/noirc_frontend/src/node_interner.rs @@ -21,8 +21,8 @@ use crate::hir_def::{ }; use crate::token::{Attributes, SecondaryAttribute}; use crate::{ - ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, TypeAliasType, - TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, + BinaryOpKind, ContractFunctionType, FunctionDefinition, FunctionVisibility, Generics, Shared, + TypeAliasType, TypeBindings, TypeVariable, TypeVariableId, TypeVariableKind, }; /// An arbitrary number to limit the recursion depth when searching for trait impls. @@ -39,8 +39,8 @@ type StructAttributes = Vec; /// monomorphization - and it is not useful afterward. 
#[derive(Debug)] pub struct NodeInterner { - nodes: Arena, - func_meta: HashMap, + pub(crate) nodes: Arena, + pub(crate) func_meta: HashMap, function_definition_ids: HashMap, // For a given function ID, this gives the function's modifiers which includes @@ -52,7 +52,7 @@ pub struct NodeInterner { function_modules: HashMap, // Map each `Index` to it's own location - id_to_location: HashMap, + pub(crate) id_to_location: HashMap, // Maps each DefinitionId to a DefinitionInfo. definitions: Vec, @@ -78,21 +78,21 @@ pub struct NodeInterner { // // Map type aliases to the actual type. // When resolving types, check against this map to see if a type alias is defined. - type_aliases: Vec, + pub(crate) type_aliases: Vec, // Trait map. // // Each trait definition is possibly shared across multiple type nodes. // It is also mutated through the RefCell during name resolution to append // methods from impls to the type. - traits: HashMap, + pub(crate) traits: HashMap, // Trait implementation map // For each type that implements a given Trait ( corresponding TraitId), there should be an entry here // The purpose for this hashmap is to detect duplication of trait implementations ( if any ) // // Indexed by TraitImplIds - trait_implementations: Vec>, + pub(crate) trait_implementations: Vec>, /// Trait implementations on each type. This is expected to always have the same length as /// `self.trait_implementations`. @@ -110,6 +110,12 @@ pub struct NodeInterner { /// the context to get the concrete type of the object and select the correct impl itself. selected_trait_implementations: HashMap, + /// Holds the trait ids of the traits used for operator overloading + operator_traits: HashMap, + + /// The `Ordering` type is a semi-builtin type that is the result of the comparison traits. + ordering_type: Option, + /// Map from ExprId (referring to a Function/Method call) to its corresponding TypeBindings, /// filled out during type checking from instantiated variables. 
Used during monomorphization /// to map call site types back onto function parameter types, and undo this binding as needed. @@ -136,6 +142,13 @@ pub struct NodeInterner { // For trait implementation functions, this is their self type and trait they belong to func_id_to_trait: HashMap, + + /// A list of all type aliases that are referenced in the program. + /// Searched by LSP to resolve [Location]s of [TypeAliasType]s + pub(crate) type_alias_ref: Vec<(TypeAliasId, Location)>, + + /// Stores the [Location] of a [Type] reference + pub(crate) type_ref_locations: Vec<(Type, Location)>, } /// A trait implementation is either a normal implementation that is present in the source @@ -147,6 +160,19 @@ pub enum TraitImplKind { /// Assumed impls don't have an impl id since they don't link back to any concrete part of the source code. Assumed { object_type: Type, + + /// The trait generics to use - if specified. + /// This is allowed to be empty when they are inferred. E.g. for: + /// + /// ``` + /// trait Into { + /// fn into(self) -> T; + /// } + /// ``` + /// + /// The reference `Into::into(x)` would have inferred generics, but + /// `x.into()` with a `X: Into` in scope would not. 
+ trait_generics: Vec, }, } @@ -344,7 +370,7 @@ partialeq!(StmtId); /// This data structure is never accessed directly, so API wise there is no difference between using /// Multiple arenas and a single Arena #[derive(Debug, Clone)] -enum Node { +pub(crate) enum Node { Function(HirFunction), Statement(HirStatement), Expression(HirExpression), @@ -423,12 +449,16 @@ impl Default for NodeInterner { trait_implementations: Vec::new(), trait_implementation_map: HashMap::new(), selected_trait_implementations: HashMap::new(), + operator_traits: HashMap::new(), + ordering_type: None, instantiation_bindings: HashMap::new(), field_indices: HashMap::new(), next_type_variable_id: std::cell::Cell::new(0), globals: HashMap::new(), struct_methods: HashMap::new(), primitive_methods: HashMap::new(), + type_alias_ref: Vec::new(), + type_ref_locations: Vec::new(), }; // An empty block expression is used often, we add this into the `node` on startup @@ -455,30 +485,6 @@ impl NodeInterner { self.id_to_location.insert(expr_id.into(), Location::new(span, file)); } - /// Scans the interner for the item which is located at that [Location] - /// - /// The [Location] may not necessarily point to the beginning of the item - /// so we check if the location's span is contained within the start or end - /// of each items [Span] - pub fn find_location_index(&self, location: Location) -> Option> { - let mut location_candidate: Option<(&Index, &Location)> = None; - - // Note: we can modify this in the future to not do a linear - // scan by storing a separate map of the spans or by sorting the locations. 
- for (index, interned_location) in self.id_to_location.iter() { - if interned_location.contains(&location) { - if let Some(current_location) = location_candidate { - if interned_location.span.is_smaller(¤t_location.1.span) { - location_candidate = Some((index, interned_location)); - } - } else { - location_candidate = Some((index, interned_location)); - } - } - } - location_candidate.map(|(index, _location)| *index) - } - /// Interns a HIR Function. pub fn push_fn(&mut self, func: HirFunction) -> FuncId { FuncId(self.nodes.insert(Node::Function(func))) @@ -496,14 +502,13 @@ impl NodeInterner { id: type_id, name: unresolved_trait.trait_def.name.clone(), crate_id: unresolved_trait.crate_id, - span: unresolved_trait.trait_def.span, + location: Location::new(unresolved_trait.trait_def.span, unresolved_trait.file_id), generics: vecmap(&unresolved_trait.trait_def.generics, |_| { // Temporary type variable ids before the trait is resolved to its actual ids. // This lets us record how many arguments the type expects so that other types // can refer to it with generic arguments before the generic parameters themselves // are resolved. - let id = TypeVariableId(0); - (id, TypeVariable::unbound(id)) + TypeVariable::unbound(TypeVariableId(0)) }), self_type_typevar_id, self_type_typevar: TypeVariable::unbound(self_type_typevar_id), @@ -533,8 +538,7 @@ impl NodeInterner { // This lets us record how many arguments the type expects so that other types // can refer to it with generic arguments before the generic parameters themselves // are resolved. 
- let id = TypeVariableId(0); - (id, TypeVariable::unbound(id)) + TypeVariable::unbound(TypeVariableId(0)) }); let location = Location::new(typ.struct_def.span, file_id); @@ -550,17 +554,19 @@ impl NodeInterner { self.type_aliases.push(TypeAliasType::new( type_id, typ.type_alias_def.name.clone(), - typ.type_alias_def.span, + Location::new(typ.type_alias_def.span, typ.file_id), Type::Error, - vecmap(&typ.type_alias_def.generics, |_| { - let id = TypeVariableId(0); - (id, TypeVariable::unbound(id)) - }), + vecmap(&typ.type_alias_def.generics, |_| TypeVariable::unbound(TypeVariableId(0))), )); type_id } + /// Adds [TypeLiasId] and [Location] to the type_alias_ref vector + /// So that we can later resolve [Location]s type aliases from the LSP requests + pub fn add_type_alias_ref(&mut self, type_id: TypeAliasId, location: Location) { + self.type_alias_ref.push((type_id, location)); + } pub fn update_struct(&mut self, type_id: StructId, f: impl FnOnce(&mut StructType)) { let mut value = self.structs.get_mut(&type_id).unwrap().borrow_mut(); f(&mut value); @@ -605,6 +611,11 @@ impl NodeInterner { self.id_to_type.insert(definition_id.into(), typ); } + /// Store [Location] of [Type] reference + pub fn push_type_ref_location(&mut self, typ: Type, location: Location) { + self.type_ref_locations.push((typ, location)); + } + pub fn push_global(&mut self, stmt_id: StmtId, ident: Ident, local_id: LocalModuleId) { self.globals.insert(stmt_id, GlobalInfo { ident, local_id }); } @@ -759,12 +770,12 @@ impl NodeInterner { } /// Returns the interned meta data corresponding to `func_id` - pub fn function_meta(&self, func_id: &FuncId) -> FuncMeta { - self.func_meta.get(func_id).cloned().expect("ice: all function ids should have metadata") + pub fn function_meta(&self, func_id: &FuncId) -> &FuncMeta { + self.func_meta.get(func_id).expect("ice: all function ids should have metadata") } - pub fn try_function_meta(&self, func_id: &FuncId) -> Option { - self.func_meta.get(func_id).cloned() + 
pub fn try_function_meta(&self, func_id: &FuncId) -> Option<&FuncMeta> { + self.func_meta.get(func_id) } pub fn function_ident(&self, func_id: &FuncId) -> crate::Ident { @@ -957,6 +968,16 @@ impl NodeInterner { self.function_definition_ids[&function] } + /// Returns the DefinitionId of a trait's method, panics if the given trait method + /// is not a valid method of the trait or if the trait has not yet had + /// its methods ids set during name resolution. + pub fn trait_method_id(&self, trait_method: TraitMethodId) -> DefinitionId { + let the_trait = self.get_trait(trait_method.trait_id); + let method_name = &the_trait.methods[trait_method.method_index].name; + let function_id = the_trait.method_ids[&method_name.0.contents]; + self.function_definition_id(function_id) + } + /// Adds a non-trait method to a type. /// /// Returns `Some(duplicate)` if a matching method was already defined. @@ -1012,8 +1033,11 @@ impl NodeInterner { &self, object_type: &Type, trait_id: TraitId, + trait_generics: &[Type], ) -> Result> { - let (impl_kind, bindings) = self.try_lookup_trait_implementation(object_type, trait_id)?; + let (impl_kind, bindings) = + self.try_lookup_trait_implementation(object_type, trait_id, trait_generics)?; + Type::apply_type_bindings(bindings); Ok(impl_kind) } @@ -1023,11 +1047,13 @@ impl NodeInterner { &self, object_type: &Type, trait_id: TraitId, + trait_generics: &[Type], ) -> Result<(TraitImplKind, TypeBindings), Vec> { let mut bindings = TypeBindings::new(); let impl_kind = self.lookup_trait_implementation_helper( object_type, trait_id, + trait_generics, &mut bindings, IMPL_SEARCH_RECURSION_LIMIT, )?; @@ -1038,10 +1064,12 @@ impl NodeInterner { &self, object_type: &Type, trait_id: TraitId, + trait_generics: &[Type], type_bindings: &mut TypeBindings, recursion_limit: u32, ) -> Result> { - let make_constraint = || TraitConstraint::new(object_type.clone(), trait_id); + let make_constraint = + || TraitConstraint::new(object_type.clone(), trait_id, 
trait_generics.to_vec()); // Prevent infinite recursion when looking for impls if recursion_limit == 0 { @@ -1053,12 +1081,35 @@ impl NodeInterner { let impls = self.trait_implementation_map.get(&trait_id).ok_or_else(|| vec![make_constraint()])?; - for (existing_object_type, impl_kind) in impls { + for (existing_object_type2, impl_kind) in impls { + // Bug: We're instantiating only the object type's generics here, not all of the trait's generics like we need to let (existing_object_type, instantiation_bindings) = - existing_object_type.instantiate(self); + existing_object_type2.instantiate(self); let mut fresh_bindings = TypeBindings::new(); + let mut check_trait_generics = |impl_generics: &[Type]| { + trait_generics.iter().zip(impl_generics).all(|(trait_generic, impl_generic2)| { + let impl_generic = impl_generic2.substitute(&instantiation_bindings); + trait_generic.try_unify(&impl_generic, &mut fresh_bindings).is_ok() + }) + }; + + let generics_match = match impl_kind { + TraitImplKind::Normal(id) => { + let shared_impl = self.get_trait_implementation(*id); + let shared_impl = shared_impl.borrow(); + check_trait_generics(&shared_impl.trait_generics) + } + TraitImplKind::Assumed { trait_generics, .. } => { + check_trait_generics(trait_generics) + } + }; + + if !generics_match { + continue; + } + if object_type.try_unify(&existing_object_type, &mut fresh_bindings).is_ok() { // The unification was successful so we can append fresh_bindings to our bindings list type_bindings.extend(fresh_bindings); @@ -1095,12 +1146,21 @@ impl NodeInterner { recursion_limit: u32, ) -> Result<(), Vec> { for constraint in where_clause { - let constraint_type = constraint.typ.substitute(instantiation_bindings); + // Instantiation bindings are generally safe to force substitute into the same type. + // This is needed here to undo any bindings done to trait methods by monomorphization. + // Otherwise, an impl for (A, B) could get narrowed to only an impl for e.g. (u8, u16). 
+ let constraint_type = constraint.typ.force_substitute(instantiation_bindings); let constraint_type = constraint_type.substitute(type_bindings); + let trait_generics = vecmap(&constraint.trait_generics, |generic| { + let generic = generic.force_substitute(instantiation_bindings); + generic.substitute(type_bindings) + }); + self.lookup_trait_implementation_helper( &constraint_type, constraint.trait_id, + &trait_generics, // Use a fresh set of type bindings here since the constraint_type originates from // our impl list, which we don't want to bind to. &mut TypeBindings::new(), @@ -1122,14 +1182,15 @@ impl NodeInterner { &mut self, object_type: Type, trait_id: TraitId, + trait_generics: Vec, ) -> bool { // Make sure there are no overlapping impls - if self.try_lookup_trait_implementation(&object_type, trait_id).is_ok() { + if self.try_lookup_trait_implementation(&object_type, trait_id, &trait_generics).is_ok() { return false; } let entries = self.trait_implementation_map.entry(trait_id).or_default(); - entries.push((object_type.clone(), TraitImplKind::Assumed { object_type })); + entries.push((object_type.clone(), TraitImplKind::Assumed { object_type, trait_generics })); true } @@ -1138,23 +1199,32 @@ impl NodeInterner { &mut self, object_type: Type, trait_id: TraitId, + trait_generics: Vec, impl_id: TraitImplId, + impl_generics: Generics, trait_impl: Shared, ) -> Result<(), (Span, FileId)> { assert_eq!(impl_id.0, self.trait_implementations.len(), "trait impl defined out of order"); self.trait_implementations.push(trait_impl.clone()); - // Ignoring overlapping TraitImplKind::Assumed impls here is perfectly fine. + // Replace each generic with a fresh type variable + let substitutions = impl_generics + .into_iter() + .map(|typevar| (typevar.id(), (typevar, self.next_type_variable()))) + .collect(); + + let instantiated_object_type = object_type.substitute(&substitutions); + + // Ignoring overlapping `TraitImplKind::Assumed` impls here is perfectly fine. 
// It should never happen since impls are defined at global scope, but even - // if they were, we should never prevent defining a new impl because a where + // if they were, we should never prevent defining a new impl because a 'where' // clause already assumes it exists. - let (instantiated_object_type, substitutions) = - object_type.instantiate_type_variables(self); - - if let Ok((TraitImplKind::Normal(existing), _)) = - self.try_lookup_trait_implementation(&instantiated_object_type, trait_id) - { + if let Ok((TraitImplKind::Normal(existing), _)) = self.try_lookup_trait_implementation( + &instantiated_object_type, + trait_id, + &trait_generics, + ) { let existing_impl = self.get_trait_implementation(existing); let existing_impl = existing_impl.borrow(); return Err((existing_impl.ident.span(), existing_impl.file)); @@ -1168,6 +1238,7 @@ impl NodeInterner { // The object type is generalized so that a generic impl will apply // to any type T, rather than just the generic type named T. let generalized_object_type = object_type.generalize_from_substitutions(substitutions); + let entries = self.trait_implementation_map.entry(trait_id).or_default(); entries.push((generalized_object_type, TraitImplKind::Normal(impl_id))); Ok(()) @@ -1191,12 +1262,12 @@ impl NodeInterner { method_name: &str, force_type_check: bool, ) -> Option { - let methods = self.struct_methods.get(&(id, method_name.to_owned()))?; + let methods = self.struct_methods.get(&(id, method_name.to_owned())); // If there is only one method, just return it immediately. // It will still be typechecked later. 
if !force_type_check { - if let Some(method) = methods.get_unambiguous() { + if let Some(method) = methods.and_then(|m| m.get_unambiguous()) { return Some(method); } } @@ -1208,10 +1279,10 @@ impl NodeInterner { fn find_matching_method( &self, typ: &Type, - methods: &Methods, + methods: Option<&Methods>, method_name: &str, ) -> Option { - if let Some(method) = methods.find_matching_method(typ, self) { + if let Some(method) = methods.and_then(|m| m.find_matching_method(typ, self)) { Some(method) } else { // Failed to find a match for the type in question, switch to looking at impls @@ -1226,7 +1297,7 @@ impl NodeInterner { pub fn lookup_primitive_method(&self, typ: &Type, method_name: &str) -> Option { let key = get_type_method_key(typ)?; let methods = self.primitive_methods.get(&(key, method_name.to_owned()))?; - self.find_matching_method(typ, methods, method_name) + self.find_matching_method(typ, Some(methods), method_name) } pub fn lookup_primitive_trait_method_mut( @@ -1254,102 +1325,98 @@ impl NodeInterner { /// Tags the given identifier with the selected trait_impl so that monomorphization /// can later recover which impl was selected, or alternatively see if it needs to /// decide which impl to select (because the impl was Assumed). - pub fn select_impl_for_ident(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { + pub fn select_impl_for_expression(&mut self, ident_id: ExprId, trait_impl: TraitImplKind) { self.selected_trait_implementations.insert(ident_id, trait_impl); } - /// Retrieves the impl selected for a given IdentId during name resolution. - /// From type checking and on, the "ident" referred to is changed to a TraitMethodReference node. - pub fn get_selected_impl_for_ident(&self, ident_id: ExprId) -> Option { + /// Retrieves the impl selected for a given ExprId during name resolution. 
+ pub fn get_selected_impl_for_expression(&self, ident_id: ExprId) -> Option { self.selected_trait_implementations.get(&ident_id).cloned() } - /// For a given [Index] we return [Location] to which we resolved to - /// We currently return None for features not yet implemented - /// TODO(#3659): LSP goto def should error when Ident at Location could not resolve - pub(crate) fn resolve_location(&self, index: impl Into) -> Option { - let node = self.nodes.get(index.into())?; - - match node { - Node::Function(func) => self.resolve_location(func.as_expr()), - Node::Expression(expression) => self.resolve_expression_location(expression), - _ => None, - } - } - - /// Resolves the [Location] of the definition for a given [HirExpression] - /// - /// Note: current the code returns None because some expressions are not yet implemented. - fn resolve_expression_location(&self, expression: &HirExpression) -> Option { - match expression { - HirExpression::Ident(ident) => { - let definition_info = self.definition(ident.id); - match definition_info.kind { - DefinitionKind::Function(func_id) => { - Some(self.function_meta(&func_id).location) - } - DefinitionKind::Local(_local_id) => Some(definition_info.location), - _ => None, - } - } - HirExpression::Constructor(expr) => { - let struct_type = &expr.r#type.borrow(); + /// Retrieves the trait id for a given binary operator. + /// All binary operators correspond to a trait - although multiple may correspond + /// to the same trait (such as `==` and `!=`). + /// `self.operator_traits` is expected to be filled before name resolution, + /// during definition collection. + pub fn get_operator_trait_method(&self, operator: BinaryOpKind) -> TraitMethodId { + let trait_id = self.operator_traits[&operator]; + + // Assume that the operator's method to be overloaded is the first method of the trait. 
+ TraitMethodId { trait_id, method_index: 0 } + } + + /// Add the given trait as an operator trait if its name matches one of the + /// operator trait names (Add, Sub, ...). + pub fn try_add_operator_trait(&mut self, trait_id: TraitId) { + let the_trait = self.get_trait(trait_id); + + let operator = match the_trait.name.0.contents.as_str() { + "Add" => BinaryOpKind::Add, + "Sub" => BinaryOpKind::Subtract, + "Mul" => BinaryOpKind::Multiply, + "Div" => BinaryOpKind::Divide, + "Rem" => BinaryOpKind::Modulo, + "Eq" => BinaryOpKind::Equal, + "Ord" => BinaryOpKind::Less, + "BitAnd" => BinaryOpKind::And, + "BitOr" => BinaryOpKind::Or, + "BitXor" => BinaryOpKind::Xor, + "Shl" => BinaryOpKind::ShiftLeft, + "Shr" => BinaryOpKind::ShiftRight, + _ => return, + }; - eprintln!("\n -> Resolve Constructor {struct_type:?}\n"); + self.operator_traits.insert(operator, trait_id); - Some(struct_type.location) - } - HirExpression::MemberAccess(expr_member_access) => { - self.resolve_struct_member_access(expr_member_access) + // Some operators also require we insert a matching entry for related operators + match operator { + BinaryOpKind::Equal => { + self.operator_traits.insert(BinaryOpKind::NotEqual, trait_id); } - HirExpression::Call(expr_call) => { - let func = expr_call.func; - self.resolve_location(func) + BinaryOpKind::Less => { + self.operator_traits.insert(BinaryOpKind::LessEqual, trait_id); + self.operator_traits.insert(BinaryOpKind::Greater, trait_id); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, trait_id); + + let the_trait = self.get_trait(trait_id); + self.ordering_type = match &the_trait.methods[0].typ { + Type::Forall(_, typ) => match typ.as_ref() { + Type::Function(_, return_type, _) => Some(return_type.as_ref().clone()), + other => unreachable!("Expected function type for `cmp`, found {}", other), + }, + other => unreachable!("Expected Forall type for `cmp`, found {}", other), + }; } - - _ => None, + _ => (), } } - /// Resolves the [Location] of the 
definition for a given [crate::hir_def::expr::HirMemberAccess] - /// This is used to resolve the location of a struct member access. - /// For example, in the expression `foo.bar` we want to resolve the location of `bar` - /// to the location of the definition of `bar` in the struct `foo`. - fn resolve_struct_member_access( - &self, - expr_member_access: &crate::hir_def::expr::HirMemberAccess, - ) -> Option { - let expr_lhs = &expr_member_access.lhs; - let expr_rhs = &expr_member_access.rhs; - - let found_ident = self.nodes.get(expr_lhs.into())?; - - let ident = match found_ident { - Node::Expression(HirExpression::Ident(ident)) => ident, - _ => return None, - }; - - let definition_info = self.definition(ident.id); - - let local_id = match definition_info.kind { - DefinitionKind::Local(Some(local_id)) => local_id, - _ => return None, - }; - - let constructor_expression = match self.nodes.get(local_id.into()) { - Some(Node::Expression(HirExpression::Constructor(constructor_expression))) => { - constructor_expression - } - _ => return None, - }; - - let struct_type = constructor_expression.r#type.borrow(); - let field_names = struct_type.field_names(); - - match field_names.iter().find(|field_name| field_name.0 == expr_rhs.0) { - Some(found) => Some(Location::new(found.span(), struct_type.location.file)), - None => None, - } + /// This function is needed when creating a NodeInterner for testing so that calls + /// to `get_operator_trait` do not panic when the stdlib isn't present. 
+ #[cfg(test)] + pub fn populate_dummy_operator_traits(&mut self) { + let dummy_trait = TraitId(ModuleId::dummy_id()); + self.operator_traits.insert(BinaryOpKind::Add, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Subtract, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Multiply, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Divide, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Modulo, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Equal, dummy_trait); + self.operator_traits.insert(BinaryOpKind::NotEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Less, dummy_trait); + self.operator_traits.insert(BinaryOpKind::LessEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Greater, dummy_trait); + self.operator_traits.insert(BinaryOpKind::GreaterEqual, dummy_trait); + self.operator_traits.insert(BinaryOpKind::And, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Or, dummy_trait); + self.operator_traits.insert(BinaryOpKind::Xor, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftLeft, dummy_trait); + self.operator_traits.insert(BinaryOpKind::ShiftRight, dummy_trait); + } + + pub(crate) fn ordering_type(&self) -> Type { + self.ordering_type.clone().expect("Expected ordering_type to be set in the NodeInterner") } } diff --git a/compiler/noirc_frontend/src/parser/errors.rs b/compiler/noirc_frontend/src/parser/errors.rs index 09dc6dfff8d..5c869ff4719 100644 --- a/compiler/noirc_frontend/src/parser/errors.rs +++ b/compiler/noirc_frontend/src/parser/errors.rs @@ -26,6 +26,8 @@ pub enum ParserErrorReason { EarlyReturn, #[error("Patterns aren't allowed in a trait's function declarations")] PatternInTraitFunctionParameter, + #[error("Modifiers are ignored on a trait impl method")] + TraitImplFunctionModifiers, #[error("comptime keyword is deprecated")] ComptimeDeprecated, #[error("{0} are experimental and aren't fully supported yet")] @@ -148,6 +150,11 @@ impl From for 
Diagnostic { "".into(), error.span, ), + ParserErrorReason::TraitImplFunctionModifiers => Diagnostic::simple_warning( + reason.to_string(), + "".into(), + error.span, + ), ParserErrorReason::ExpectedPatternButFoundType(ty) => { Diagnostic::simple_error("Expected a ; separating these two statements".into(), format!("{ty} is a type and cannot be used as a variable name"), error.span) } diff --git a/compiler/noirc_frontend/src/parser/mod.rs b/compiler/noirc_frontend/src/parser/mod.rs index a6c631895cd..0ff7819c00f 100644 --- a/compiler/noirc_frontend/src/parser/mod.rs +++ b/compiler/noirc_frontend/src/parser/mod.rs @@ -208,7 +208,7 @@ fn force<'a, T: 'a>(parser: impl NoirParser + 'a) -> impl NoirParser, pub functions: Vec, @@ -344,6 +344,7 @@ impl std::fmt::Display for SortedSubModule { } } +#[derive(Clone)] pub struct SortedSubModule { pub name: Ident, pub contents: SortedModule, diff --git a/compiler/noirc_frontend/src/parser/parser.rs b/compiler/noirc_frontend/src/parser/parser.rs index a97637642af..cdfdc570949 100644 --- a/compiler/noirc_frontend/src/parser/parser.rs +++ b/compiler/noirc_frontend/src/parser/parser.rs @@ -312,7 +312,7 @@ fn function_return_type() -> impl NoirParser<((Distinctness, Visibility), Functi fn attribute() -> impl NoirParser { token_kind(TokenKind::Attribute).map(|token| match token { Token::Attribute(attribute) => attribute, - _ => unreachable!(), + _ => unreachable!("Parser should have already errored due to token not being an attribute"), }) } @@ -369,7 +369,7 @@ fn function_parameters<'a>(allow_self: bool) -> impl NoirParser> + 'a /// This parser always parses no input and fails fn nothing() -> impl NoirParser { - one_of([]).map(|_| unreachable!()) + one_of([]).map(|_| unreachable!("parser should always error")) } fn self_parameter() -> impl NoirParser { @@ -413,13 +413,7 @@ fn trait_definition() -> impl NoirParser { .then_ignore(just(Token::LeftBrace)) .then(trait_body()) .then_ignore(just(Token::RightBrace)) - .validate(|(((name, 
generics), where_clause), items), span, emit| { - if !generics.is_empty() { - emit(ParserError::with_reason( - ParserErrorReason::ExperimentalFeature("Generic traits"), - span, - )); - } + .map_with_span(|(((name, generics), where_clause), items), span| { TopLevelStatement::Trait(NoirTrait { name, generics, where_clause, span, items }) }) } @@ -613,7 +607,18 @@ fn trait_implementation() -> impl NoirParser { } fn trait_implementation_body() -> impl NoirParser> { - let function = function_definition(true).map(TraitImplItem::Function); + let function = function_definition(true).validate(|mut f, span, emit| { + if f.def().is_internal + || f.def().is_unconstrained + || f.def().is_open + || f.def().visibility != FunctionVisibility::Private + { + emit(ParserError::with_reason(ParserErrorReason::TraitImplFunctionModifiers, span)); + } + // Trait impl functions are always public + f.def_mut().visibility = FunctionVisibility::Public; + TraitImplItem::Function(f) + }); let alias = keyword(Keyword::Type) .ignore_then(ident()) @@ -1119,14 +1124,7 @@ fn int_type() -> impl NoirParser { Err(ParserError::expected_label(ParsingRuleLabel::IntegerType, unexpected, span)) } })) - .validate(|(_, token), span, emit| { - let typ = UnresolvedTypeData::from_int_token(token).with_span(span); - if let UnresolvedTypeData::Integer(crate::Signedness::Signed, _) = &typ.typ { - let reason = ParserErrorReason::ExperimentalFeature("Signed integer types"); - emit(ParserError::with_reason(reason, span)); - } - typ - }) + .map_with_span(|(_, token), span| UnresolvedTypeData::from_int_token(token).with_span(span)) } fn named_type(type_parser: impl NoirParser) -> impl NoirParser { diff --git a/compiler/noirc_frontend/src/resolve_locations.rs b/compiler/noirc_frontend/src/resolve_locations.rs new file mode 100644 index 00000000000..cfb88966b9d --- /dev/null +++ b/compiler/noirc_frontend/src/resolve_locations.rs @@ -0,0 +1,217 @@ +use arena::Index; +use noirc_errors::Location; + +use 
crate::hir_def::expr::HirExpression; +use crate::hir_def::types::Type; + +use crate::node_interner::{DefinitionKind, Node, NodeInterner}; + +impl NodeInterner { + /// Scans the interner for the item which is located at that [Location] + /// + /// The [Location] may not necessarily point to the beginning of the item + /// so we check if the location's span is contained within the start or end + /// of each items [Span] + pub fn find_location_index(&self, location: Location) -> Option> { + let mut location_candidate: Option<(&Index, &Location)> = None; + + // Note: we can modify this in the future to not do a linear + // scan by storing a separate map of the spans or by sorting the locations. + for (index, interned_location) in self.id_to_location.iter() { + if interned_location.contains(&location) { + if let Some(current_location) = location_candidate { + if interned_location.span.is_smaller(¤t_location.1.span) { + location_candidate = Some((index, interned_location)); + } + } else { + location_candidate = Some((index, interned_location)); + } + } + } + location_candidate.map(|(index, _location)| *index) + } + + /// Returns the [Location] of the definition of the given Ident found at [Span] of the given [FileId]. + /// Returns [None] when definition is not found. 
+ pub fn get_definition_location_from( + &self, + location: Location, + return_type_location_instead: bool, + ) -> Option { + self.find_location_index(location) + .and_then(|index| self.resolve_location(index, return_type_location_instead)) + .or_else(|| self.try_resolve_trait_impl_location(location)) + .or_else(|| self.try_resolve_trait_method_declaration(location)) + .or_else(|| self.try_resolve_type_ref(location)) + .or_else(|| self.try_resolve_type_alias(location)) + } + + pub fn get_declaration_location_from(&self, location: Location) -> Option { + self.try_resolve_trait_method_declaration(location).or_else(|| { + self.find_location_index(location) + .and_then(|index| self.resolve_location(index, false)) + .and_then(|found_impl_location| { + self.try_resolve_trait_method_declaration(found_impl_location) + }) + }) + } + + /// For a given [Index] we return [Location] to which we resolved to + /// We currently return None for features not yet implemented + /// TODO(#3659): LSP goto def should error when Ident at Location could not resolve + fn resolve_location( + &self, + index: impl Into, + return_type_location_instead: bool, + ) -> Option { + if return_type_location_instead { + return self.get_type_location_from_index(index); + } + + let node = self.nodes.get(index.into())?; + + match node { + Node::Function(func) => { + self.resolve_location(func.as_expr(), return_type_location_instead) + } + Node::Expression(expression) => { + self.resolve_expression_location(expression, return_type_location_instead) + } + _ => None, + } + } + + fn get_type_location_from_index(&self, index: impl Into) -> Option { + match self.id_type(index.into()) { + Type::Struct(struct_type, _) => Some(struct_type.borrow().location), + _ => None, + } + } + + /// Resolves the [Location] of the definition for a given [HirExpression] + /// + /// Note: current the code returns None because some expressions are not yet implemented. 
+ fn resolve_expression_location( + &self, + expression: &HirExpression, + return_type_location_instead: bool, + ) -> Option { + match expression { + HirExpression::Ident(ident) => { + let definition_info = self.definition(ident.id); + match definition_info.kind { + DefinitionKind::Function(func_id) => { + Some(self.function_meta(&func_id).location) + } + DefinitionKind::Local(_local_id) => Some(definition_info.location), + DefinitionKind::Global(_global_id) => Some(definition_info.location), + _ => None, + } + } + HirExpression::Constructor(expr) => { + let struct_type = &expr.r#type.borrow(); + Some(struct_type.location) + } + HirExpression::MemberAccess(expr_member_access) => { + self.resolve_struct_member_access(expr_member_access) + } + HirExpression::Call(expr_call) => { + let func = expr_call.func; + self.resolve_location(func, return_type_location_instead) + } + + _ => None, + } + } + + /// Resolves the [Location] of the definition for a given [crate::hir_def::expr::HirMemberAccess] + /// This is used to resolve the location of a struct member access. + /// For example, in the expression `foo.bar` we want to resolve the location of `bar` + /// to the location of the definition of `bar` in the struct `foo`. 
+ fn resolve_struct_member_access( + &self, + expr_member_access: &crate::hir_def::expr::HirMemberAccess, + ) -> Option { + let expr_lhs = &expr_member_access.lhs; + let expr_rhs = &expr_member_access.rhs; + + let lhs_self_struct = match self.id_type(expr_lhs) { + Type::Struct(struct_type, _) => struct_type, + _ => return None, + }; + + let struct_type = lhs_self_struct.borrow(); + let field_names = struct_type.field_names(); + + field_names.iter().find(|field_name| field_name.0 == expr_rhs.0).map(|found_field_name| { + Location::new(found_field_name.span(), struct_type.location.file) + }) + } + + /// Attempts to resolve [Location] of [Trait] based on [Location] of [TraitImpl] + /// This is used by LSP to resolve the location of a trait based on the location of a trait impl. + /// + /// Example: + /// impl Foo for Bar { ... } -> trait Foo { ... } + fn try_resolve_trait_impl_location(&self, location: Location) -> Option { + self.trait_implementations + .iter() + .find(|shared_trait_impl| { + let trait_impl = shared_trait_impl.borrow(); + trait_impl.file == location.file && trait_impl.ident.span().contains(&location.span) + }) + .and_then(|shared_trait_impl| { + let trait_impl = shared_trait_impl.borrow(); + self.traits.get(&trait_impl.trait_id).map(|trait_| trait_.location) + }) + } + + /// Attempts to resolve [Location] of [Trait]'s [TraitFunction] declaration based on [Location] of [TraitFunction] call. + /// + /// This is used by LSP to resolve the location. 
+ /// + /// ### Example: + /// ```nr + /// trait Fieldable { + /// fn to_field(self) -> Field; + /// ^------------------------------\ + /// } | + /// | + /// fn main_func(x: u32) { | + /// assert(x.to_field() == 15); | + /// \......................./ + /// } + /// ``` + /// + fn try_resolve_trait_method_declaration(&self, location: Location) -> Option { + self.func_meta + .iter() + .find(|(_, func_meta)| func_meta.location.contains(&location)) + .and_then(|(func_id, _func_meta)| { + let (_, trait_id) = self.get_function_trait(func_id)?; + + let mut methods = self.traits.get(&trait_id)?.methods.iter(); + let method = + methods.find(|method| method.name.0.contents == self.function_name(func_id)); + method.map(|method| method.location) + }) + } + + /// Attempts to resolve [Location] of [Type] based on [Location] of reference in code + pub(crate) fn try_resolve_type_ref(&self, location: Location) -> Option { + self.type_ref_locations + .iter() + .find(|(_typ, type_ref_location)| type_ref_location.contains(&location)) + .and_then(|(typ, _)| match typ { + Type::Struct(struct_typ, _) => Some(struct_typ.borrow().location), + _ => None, + }) + } + + fn try_resolve_type_alias(&self, location: Location) -> Option { + self.type_alias_ref + .iter() + .find(|(_, named_type_location)| named_type_location.span.contains(&location.span)) + .map(|(type_alias_id, _found_location)| self.get_type_alias(*type_alias_id).location) + } +} diff --git a/compiler/noirc_frontend/src/tests.rs b/compiler/noirc_frontend/src/tests.rs index 3f4755aa0ef..58eba9fd8e7 100644 --- a/compiler/noirc_frontend/src/tests.rs +++ b/compiler/noirc_frontend/src/tests.rs @@ -21,7 +21,6 @@ mod test { use crate::hir::Context; use crate::node_interner::{NodeInterner, StmtId}; - use crate::graph::CrateGraph; use crate::hir::def_collector::dc_crate::DefCollector; use crate::hir_def::expr::HirExpression; use crate::hir_def::stmt::HirStatement; @@ -53,8 +52,8 @@ mod test { ) -> (ParsedModule, Context, 
Vec<(CompilationError, FileId)>) { let root = std::path::Path::new("/"); let fm = FileManager::new(root); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); + let mut context = Context::new(fm, Default::default()); + context.def_interner.populate_dummy_operator_traits(); let root_file_id = FileId::dummy(); let root_crate_id = context.crate_graph.add_crate_root(root_file_id); let (program, parser_errors) = parse_program(src); @@ -90,6 +89,56 @@ mod test { get_program(src).2 } + #[test] + fn check_trait_implemented_for_all_t() { + let src = " + trait Default { + fn default() -> Self; + } + + trait Eq { + fn eq(self, other: Self) -> bool; + } + + trait IsDefault { + fn is_default(self) -> bool; + } + + impl IsDefault for T where T: Default + Eq { + fn is_default(self) -> bool { + self.eq(T::default()) + } + } + + struct Foo { + a: u64, + } + + impl Eq for Foo { + fn eq(self, other: Foo) -> bool { self.a == other.a } + } + + impl Default for u64 { + fn default() -> Self { + 0 + } + } + + impl Default for Foo { + fn default() -> Self { + Foo { a: Default::default() } + } + } + + fn main(a: Foo) -> pub bool { + a.is_default() + }"; + + let errors = get_program_errors(src); + errors.iter().for_each(|err| println!("{:?}", err)); + assert!(errors.is_empty()); + } + #[test] fn check_trait_implementation_duplicate_method() { let src = " @@ -1077,9 +1126,9 @@ mod test { } fn check_rewrite(src: &str, expected: &str) { - let (_program, context, _errors) = get_program(src); + let (_program, mut context, _errors) = get_program(src); let main_func_id = context.def_interner.find_function("main").unwrap(); - let program = monomorphize(main_func_id, &context.def_interner); + let program = monomorphize(main_func_id, &mut context.def_interner); assert!(format!("{}", program) == expected); } diff --git a/compiler/noirc_printable_type/Cargo.toml b/compiler/noirc_printable_type/Cargo.toml index 5f2eea92257..fbbe778e561 100644 --- 
a/compiler/noirc_printable_type/Cargo.toml +++ b/compiler/noirc_printable_type/Cargo.toml @@ -14,5 +14,6 @@ regex = "1.9.1" serde.workspace = true serde_json.workspace = true thiserror.workspace = true +jsonrpc.workspace = true [dev-dependencies] diff --git a/compiler/noirc_printable_type/src/lib.rs b/compiler/noirc_printable_type/src/lib.rs index 910604d0000..bae3b1e43a3 100644 --- a/compiler/noirc_printable_type/src/lib.rs +++ b/compiler/noirc_printable_type/src/lib.rs @@ -32,41 +32,13 @@ pub enum PrintableType { String { length: u64, }, - FmtString {}, - Error {}, - Unit {}, - Constant {}, - TraitAsType {}, - TypeVariable {}, - NamedGeneric {}, - Forall {}, - Function, - MutableReference {}, - NotConstant {}, -} - -impl PrintableType { - /// Returns the number of field elements required to represent the type once encoded. - pub fn field_count(&self) -> Option { - match self { - Self::Field - | Self::SignedInteger { .. } - | Self::UnsignedInteger { .. } - | Self::Boolean - | Self::Function => Some(1), - Self::Array { length, typ } => { - length.and_then(|len| typ.field_count().map(|x| x * (len as u32))) - } - Self::Tuple { types } => types - .iter() - .fold(Some(0), |count, typ| count.and_then(|c| typ.field_count().map(|fc| c + fc))), - Self::Struct { fields, .. } => fields.iter().fold(Some(0), |count, (_, field_type)| { - count.and_then(|c| field_type.field_count().map(|fc| c + fc)) - }), - Self::String { length } => Some(*length as u32), - _ => Some(0), - } - } + Function { + env: Box, + }, + MutableReference { + typ: Box, + }, + Unit, } /// This is what all formats eventually transform into @@ -95,6 +67,9 @@ pub enum ForeignCallError { #[error("Could not parse PrintableType argument. {0}")] ParsingError(#[from] serde_json::Error), + + #[error("Failed calling external resolver. 
{0}")] + ExternalResolverError(#[from] jsonrpc::Error), } impl TryFrom<&[ForeignCallParam]> for PrintableValueDisplay { @@ -133,47 +108,26 @@ fn convert_string_inputs( fn convert_fmt_string_inputs( foreign_call_inputs: &[ForeignCallParam], ) -> Result { - let (message, input_and_printable_values) = + let (message, input_and_printable_types) = foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; let message_as_fields = vecmap(message.values(), |value| value.to_field()); let message_as_string = decode_string_value(&message_as_fields); - let (num_values, input_and_printable_values) = input_and_printable_values + let (num_values, input_and_printable_types) = input_and_printable_types .split_first() .ok_or(ForeignCallError::MissingForeignCallInputs)?; let mut output = Vec::new(); let num_values = num_values.unwrap_value().to_field().to_u128() as usize; - for (i, printable_value) in input_and_printable_values + let types_start_at = input_and_printable_types.len() - num_values; + let mut input_iter = input_and_printable_types[0..types_start_at] .iter() - .skip(input_and_printable_values.len() - num_values) - .enumerate() - { - let printable_type = fetch_printable_type(printable_value)?; - let field_count = printable_type.field_count(); - let value = match (field_count, &printable_type) { - (_, PrintableType::Array { .. } | PrintableType::String { .. 
}) => { - // Arrays and strings are represented in a single value vector rather than multiple separate input values - let mut input_values_as_fields = input_and_printable_values[i] - .values() - .into_iter() - .map(|value| value.to_field()); - decode_value(&mut input_values_as_fields, &printable_type) - } - (Some(type_size), _) => { - // We must use a flat map here as each value in a struct will be in a separate input value - let mut input_values_as_fields = input_and_printable_values - [i..(i + (type_size as usize))] - .iter() - .flat_map(|param| vecmap(param.values(), |value| value.to_field())); - decode_value(&mut input_values_as_fields, &printable_type) - } - (None, _) => { - panic!("unexpected None field_count for type {printable_type:?}"); - } - }; + .flat_map(|param| vecmap(param.values(), |value| value.to_field())); + for printable_type in input_and_printable_types.iter().skip(types_start_at) { + let printable_type = fetch_printable_type(printable_type)?; + let value = decode_value(&mut input_iter, &printable_type); output.push((value, printable_type)); } @@ -219,7 +173,7 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { output.push_str("false"); } } - (PrintableValue::Field(_), PrintableType::Function) => { + (PrintableValue::Field(_), PrintableType::Function { .. }) => { output.push_str("<>"); } (_, PrintableType::MutableReference { .. }) => { @@ -276,6 +230,8 @@ fn to_string(value: &PrintableValue, typ: &PrintableType) -> Option { output.push(')'); } + (_, PrintableType::Unit) => output.push_str("()"), + _ => return None, }; @@ -346,14 +302,12 @@ pub fn decode_value( PrintableType::Field | PrintableType::SignedInteger { .. } | PrintableType::UnsignedInteger { .. 
} - | PrintableType::Boolean - | PrintableType::Function => { + | PrintableType::Boolean => { let field_element = field_iterator.next().unwrap(); PrintableValue::Field(field_element) } PrintableType::Array { length: None, typ } => { - // TODO: maybe the len is the first arg? not sure let length = field_iterator .next() .expect("not enough data to decode variable array length") @@ -393,7 +347,18 @@ pub fn decode_value( PrintableValue::Struct(struct_map) } - _ => PrintableValue::Other, + PrintableType::Function { env } => { + let field_element = field_iterator.next().unwrap(); + let func_ref = PrintableValue::Field(field_element); + // we want to consume the fields from the environment, but for now they are not actually printed + decode_value(field_iterator, env); + func_ref + } + PrintableType::MutableReference { typ } => { + // we decode the reference, but it's not really used for printing + decode_value(field_iterator, typ) + } + PrintableType::Unit => PrintableValue::Field(FieldElement::zero()), } } diff --git a/compiler/utils/iter-extended/src/lib.rs b/compiler/utils/iter-extended/src/lib.rs index aef89b58b30..d2ef271aebf 100644 --- a/compiler/utils/iter-extended/src/lib.rs +++ b/compiler/utils/iter-extended/src/lib.rs @@ -44,7 +44,7 @@ where } /// Given an iterator over a Result, filter out the Ok values from the Err values -/// and return both in separate Vecs. Unlike other collect-like functions over Results, +/// and return both in separate `Vec`s. Unlike other collect-like functions over Results, /// this function will always consume the entire iterator. 
pub fn partition_results(iterable: It, mut f: F) -> (Vec, Vec) where diff --git a/compiler/wasm/.eslintrc.js b/compiler/wasm/.eslintrc.js index 33335c2a877..5a2cc7f1ec0 100644 --- a/compiler/wasm/.eslintrc.js +++ b/compiler/wasm/.eslintrc.js @@ -1,3 +1,3 @@ module.exports = { - extends: ["../../.eslintrc.js"], + extends: ['../../.eslintrc.js'], }; diff --git a/compiler/wasm/.gitignore b/compiler/wasm/.gitignore index f968dafbcc3..3ae8fb4b218 100644 --- a/compiler/wasm/.gitignore +++ b/compiler/wasm/.gitignore @@ -1 +1,3 @@ -noir-script/target +dist +build + diff --git a/compiler/wasm/.mocharc.json b/compiler/wasm/.mocharc.json index 5e3ee32d901..8009c66f9a8 100644 --- a/compiler/wasm/.mocharc.json +++ b/compiler/wasm/.mocharc.json @@ -1,7 +1,13 @@ { - "extension": [ - "ts" - ], - "spec": "test/node/**/*.test.ts", - "require": "ts-node/register" -} \ No newline at end of file + "require": "ts-node/register", + "extensions": [ + "ts" + ], + "spec": [ + "./test/**/!(browser)/*.test.ts" + ], + "node-option": [ + "loader=ts-node" + ] + } + \ No newline at end of file diff --git a/compiler/wasm/Cargo.toml b/compiler/wasm/Cargo.toml index 58ad7764fdc..a20efeeed8a 100644 --- a/compiler/wasm/Cargo.toml +++ b/compiler/wasm/Cargo.toml @@ -18,18 +18,24 @@ nargo.workspace = true noirc_driver.workspace = true noirc_frontend.workspace = true noirc_errors.workspace = true +noirc_evaluator.workspace = true wasm-bindgen.workspace = true serde.workspace = true js-sys.workspace = true console_error_panic_hook.workspace = true gloo-utils.workspace = true -log.workspace = true - -wasm-logger = "0.2.0" +tracing-subscriber.workspace = true +tracing-web.workspace = true # This is an unused dependency, we are adding it # so that we can enable the js feature in getrandom. getrandom = { workspace = true, features = ["js"] } +# This is an unused dependency, we are adding it +# so that we can enable the debug-embed feature in rust-embed. 
+# This is needed for rust-embed to include the stdlib sources in dev mode +# while simultaneously allowing us to deactivate wasm-opt for speed. +rust-embed = { workspace = true, features = ["debug-embed"] } + [build-dependencies] -build-data.workspace = true +build-data.workspace = true \ No newline at end of file diff --git a/compiler/wasm/build.sh b/compiler/wasm/build.sh deleted file mode 100755 index 24af149bcea..00000000000 --- a/compiler/wasm/build.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash - -function require_command { - if ! command -v "$1" >/dev/null 2>&1; then - echo "Error: $1 is required but not installed." >&2 - exit 1 - fi -} -function check_installed { - if ! command -v "$1" >/dev/null 2>&1; then - echo "$1 is not installed. Please install it." >&2 - return 1 - fi - return 0 -} -function run_or_fail { - "$@" - local status=$? - if [ $status -ne 0 ]; then - echo "Command '$*' failed with exit code $status" >&2 - exit $status - fi -} - -require_command jq -require_command cargo -require_command wasm-bindgen -check_installed wasm-opt - -self_path=$(dirname "$(readlink -f "$0")") -export pname=$(cargo read-manifest | jq -r '.name') -export CARGO_TARGET_DIR=$self_path/target - -rm -rf $self_path/outputs >/dev/null 2>&1 -rm -rf $self_path/result >/dev/null 2>&1 - -if [ -n "$out" ]; then - echo "Will install package to $out (defined outside installPhase.sh script)" -else - export out="$self_path/outputs/out" - echo "Will install package to $out" -fi - -run_or_fail $self_path/buildPhaseCargoCommand.sh -run_or_fail $self_path/installPhase.sh - -ln -s $out $self_path/result diff --git a/compiler/wasm/buildPhaseCargoCommand.sh b/compiler/wasm/buildPhaseCargoCommand.sh deleted file mode 100755 index 2ab0f1eb3cb..00000000000 --- a/compiler/wasm/buildPhaseCargoCommand.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash - -function run_or_fail { - "$@" - local status=$? 
- if [ $status -ne 0 ]; then - echo "Command '$*' failed with exit code $status" >&2 - exit $status - fi -} -function run_if_available { - if command -v "$1" >/dev/null 2>&1; then - "$@" - else - echo "$1 is not installed. Please install it to use this feature." >&2 - fi -} - -export self_path=$(dirname "$(readlink -f "$0")") - -# Clear out the existing build artifacts as these aren't automatically removed by wasm-pack. -if [ -d ./pkg/ ]; then - rm -rf $self_path/pkg/ -fi - -TARGET=wasm32-unknown-unknown -WASM_BINARY=$CARGO_TARGET_DIR/$TARGET/release/${pname}.wasm - -NODE_DIR=$self_path/nodejs/ -BROWSER_DIR=$self_path/web/ -NODE_WASM=${NODE_DIR}/${pname}_bg.wasm -BROWSER_WASM=${BROWSER_DIR}/${pname}_bg.wasm - -# Build the new wasm package -run_or_fail cargo build --lib --release --target $TARGET --package ${pname} ${cargoExtraArgs} -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $NODE_DIR --typescript --target nodejs -run_or_fail wasm-bindgen $WASM_BINARY --out-dir $BROWSER_DIR --typescript --target web -run_if_available wasm-opt $NODE_WASM -o $NODE_WASM -O -run_if_available wasm-opt $BROWSER_WASM -o $BROWSER_WASM -O \ No newline at end of file diff --git a/compiler/wasm/installPhase.sh b/compiler/wasm/installPhase.sh deleted file mode 100755 index e5be98a3339..00000000000 --- a/compiler/wasm/installPhase.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env bash -export self_path=$(dirname "$(readlink -f "$0")") - -export out_path=$out/noir_wasm - -mkdir -p $out_path -cp $self_path/README.md $out_path/ -cp $self_path/package.json $out_path/ -cp -r $self_path/nodejs $out_path/ -cp -r $self_path/web $out_path/ diff --git a/compiler/wasm/package.json b/compiler/wasm/package.json index b57f700b661..2aaf4a494df 100644 --- a/compiler/wasm/package.json +++ b/compiler/wasm/package.json @@ -1,40 +1,85 @@ { "name": "@noir-lang/noir_wasm", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.20.0", + "version": "0.23.0", "license": "(MIT OR Apache-2.0)", 
- "main": "./nodejs/noir_wasm.js", - "types": "./web/noir_wasm.d.ts", - "module": "./web/noir_wasm.js", + "main": "dist/main.js", + "types": "./dist/types/src/index.d.cts", + "exports": { + "node": "./dist/node/main.js", + "import": "./dist/web/main.mjs", + "require": "./dist/node/main.js", + "types": "./dist/types/src/index.d.cts", + "default": "./dist/web/main.mjs" + }, "files": [ - "nodejs", - "web", + "dist", "package.json" ], "sideEffects": false, + "homepage": "https://noir-lang.org/", "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" }, "scripts": { - "build": "bash ./build.sh", - "test": "yarn test:node && yarn test:browser", - "test:node": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\"}' mocha", + "install:wasm_pack": "./scripts/install_wasm-pack.sh", + "build": "yarn install:wasm_pack && WASM_OPT=$(./scripts/command-check.sh wasm-opt) webpack", + "test": "yarn test:build_fixtures && yarn test:node && yarn test:browser", + "test:build_fixtures": "./scripts/build-fixtures.sh", "test:browser": "web-test-runner", - "clean": "chmod u+w web nodejs || true && rm -rf ./nodejs ./web ./target ./result", + "test:browser:docker": "docker run --rm -v $(cd ../.. && pwd):/usr/src/noir -w /usr/src/noir/compiler/wasm mcr.microsoft.com/playwright:v1.40.0-jammy yarn test:browser", + "test:node": "NODE_NO_WARNINGS=1 mocha --config ./.mocharc.json", + "clean": "rm -rf ./build ./target ./dist public/fixtures/simple/target public/fixtures/with-deps/target", "nightly:version": "jq --arg new_version \"-$(git rev-parse --short HEAD)$1\" '.version = .version + $new_version' package.json > package-tmp.json && mv package-tmp.json package.json", "publish": "echo 📡 publishing `$npm_package_name` && yarn npm publish", - "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0", - "build:nix": "nix build -L .#noir_wasm", - "install:from:nix": "yarn clean && yarn build:nix && cp -rL ./result/noir_wasm/nodejs ./ && cp -rL ./result/noir_wasm/web ./" + "lint": "NODE_NO_WARNINGS=1 eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "devDependencies": { "@esm-bundle/chai": "^4.3.4-fix.0", + "@ltd/j-toml": "^1.38.0", + "@noir-lang/noirc_abi": "workspace:*", + "@types/adm-zip": "^0.5.0", + "@types/chai": "^4", + "@types/mocha": "^10.0.6", + "@types/mocha-each": "^2", + "@types/node": "^20.10.5", + "@types/pako": "^2", + "@types/path-browserify": "^1", + "@types/readable-stream": "^4", + "@types/sinon": "^17", + "@wasm-tool/wasm-pack-plugin": "^1.7.0", "@web/dev-server-esbuild": "^0.3.6", - "@web/test-runner": "^0.15.3", - "@web/test-runner-playwright": "^0.10.0", - "mocha": "^10.2.0" + "@web/test-runner": "^0.18.0", + "@web/test-runner-playwright": "^0.11.0", + "adm-zip": "^0.5.0", + "assert": "^2.1.0", + "browserify-fs": "^1.0.0", + "chai": "^4.3.10", + "copy-webpack-plugin": "^11.0.0", + "html-webpack-plugin": "^5.5.4", + "memfs": "^4.6.0", + "mocha": "^10.2.0", + "mocha-each": "^2.0.1", + "path-browserify": "^1.0.1", + "process": "^0.11.10", + "readable-stream": "^4.4.2", + "sinon": "^17.0.1", + "ts-loader": "^9.5.1", + "ts-node": "^10.9.1", + "typescript": "~5.2.2", + "unzipit": "^1.4.3", + "url": "^0.11.3", + "webpack": "^5.49.0", + "webpack-cli": "^4.7.2" + }, + "dependencies": { + "pako": "^2.1.0" } } diff --git a/compiler/wasm/scripts/build-fixtures.sh b/compiler/wasm/scripts/build-fixtures.sh new file mode 100755 index 00000000000..3a2330d4726 --- /dev/null +++ b/compiler/wasm/scripts/build-fixtures.sh @@ -0,0 +1,5 @@ +#!/usr/bin/env bash + +nargo compile --program-dir ./test/fixtures/simple +nargo compile --program-dir ./test/fixtures/with-deps +nargo compile --program-dir ./test/fixtures/noir-contract \ No newline at end of file diff --git 
a/compiler/wasm/scripts/command-check.sh b/compiler/wasm/scripts/command-check.sh new file mode 100755 index 00000000000..51d342a8bd0 --- /dev/null +++ b/compiler/wasm/scripts/command-check.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +command -v $1 >/dev/null 2>&1 && echo "true" || { echo >&2 "$1 is not installed" && echo "false"; } \ No newline at end of file diff --git a/compiler/wasm/scripts/install_wasm-pack.sh b/compiler/wasm/scripts/install_wasm-pack.sh new file mode 100755 index 00000000000..28721e62fe2 --- /dev/null +++ b/compiler/wasm/scripts/install_wasm-pack.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash +set -eu + +cd $(dirname "$0")/.. + +# Install wasm-pack +CARGO_BINSTALL_CHECK=$(./scripts/command-check.sh cargo-binstall) +if [ $CARGO_BINSTALL_CHECK != "true" ]; then + curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash +fi + +cargo-binstall wasm-pack@0.12.1 -y \ No newline at end of file diff --git a/compiler/wasm/src/circuit.rs b/compiler/wasm/src/circuit.rs deleted file mode 100644 index fdd9a7d9a20..00000000000 --- a/compiler/wasm/src/circuit.rs +++ /dev/null @@ -1,18 +0,0 @@ -use acvm::acir::circuit::Circuit; -use gloo_utils::format::JsValueSerdeExt; -use wasm_bindgen::prelude::*; - -// Deserializes bytes into ACIR structure -#[wasm_bindgen] -pub fn acir_read_bytes(bytes: Vec) -> JsValue { - console_error_panic_hook::set_once(); - let circuit = Circuit::deserialize_circuit(&bytes).unwrap(); - ::from_serde(&circuit).unwrap() -} - -#[wasm_bindgen] -pub fn acir_write_bytes(acir: JsValue) -> Vec { - console_error_panic_hook::set_once(); - let circuit: Circuit = JsValueSerdeExt::into_serde(&acir).unwrap(); - Circuit::serialize_circuit(&circuit) -} diff --git a/compiler/wasm/src/compile.rs b/compiler/wasm/src/compile.rs index 13b366819b0..498ffe447ce 100644 --- a/compiler/wasm/src/compile.rs +++ b/compiler/wasm/src/compile.rs @@ -2,17 
+2,18 @@ use fm::FileManager; use gloo_utils::format::JsValueSerdeExt; use js_sys::{JsString, Object}; use nargo::artifacts::{ - contract::{PreprocessedContract, PreprocessedContractFunction}, - debug::DebugArtifact, - program::PreprocessedProgram, + contract::{ContractArtifact, ContractFunctionArtifact}, + program::ProgramArtifact, }; use noirc_driver::{ - add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, - CompiledContract, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, + add_dep, compile_contract, compile_main, file_manager_with_stdlib, prepare_crate, + prepare_dependency, CompileOptions, CompiledContract, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, }; +use noirc_evaluator::errors::SsaReport; use noirc_frontend::{ - graph::{CrateGraph, CrateId, CrateName}, - hir::Context, + graph::{CrateId, CrateName}, + hir::{def_map::parse_file, Context, ParsedFiles}, }; use serde::Deserialize; use std::{collections::HashMap, path::Path}; @@ -20,8 +21,6 @@ use wasm_bindgen::prelude::*; use crate::errors::{CompileError, JsCompileError}; -const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; - #[wasm_bindgen(typescript_custom_section)] const DEPENDENCY_GRAPH: &'static str = r#" export type DependencyGraph = { @@ -29,37 +28,30 @@ export type DependencyGraph = { library_dependencies: Readonly>; } -export type CompiledContract = { +export type ContractArtifact = { noir_version: string; name: string; - backend: string; functions: Array; events: Array; + file_map: Record; }; -export type CompiledProgram = { +export type ProgramArtifact = { noir_version: string; - backend: string; + hash: number; abi: any; bytecode: string; + debug_symbols: any; + file_map: Record; } -export type DebugArtifact = { - debug_symbols: Array; - file_map: Record; - warnings: Array; -}; +type WarningsCompileResult = { warnings: Array; }; -export type CompileResult = ( - | { - contract: CompiledContract; - debug: DebugArtifact; - } - | { - program: 
CompiledProgram; - debug: DebugArtifact; - } -); +export type ContractCompileResult = { contract: CompiledContract; } & WarningsCompileResult; + +export type ProgramCompileResult = { program: CompiledProgram; } & WarningsCompileResult; + +export type CompileResult = ContractCompileResult | ProgramCompileResult; "#; #[wasm_bindgen] @@ -79,38 +71,36 @@ extern "C" { impl JsCompileResult { const CONTRACT_PROP: &'static str = "contract"; const PROGRAM_PROP: &'static str = "program"; - const DEBUG_PROP: &'static str = "debug"; + const WARNINGS_PROP: &'static str = "warnings"; pub fn new(resp: CompileResult) -> JsCompileResult { let obj = JsCompileResult::constructor(); match resp { - CompileResult::Contract { contract, debug } => { + CompileResult::Contract { contract, warnings } => { js_sys::Reflect::set( &obj, &JsString::from(JsCompileResult::CONTRACT_PROP), &::from_serde(&contract).unwrap(), ) .unwrap(); - js_sys::Reflect::set( &obj, - &JsString::from(JsCompileResult::DEBUG_PROP), - &::from_serde(&debug).unwrap(), + &JsString::from(JsCompileResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), ) .unwrap(); } - CompileResult::Program { program, debug } => { + CompileResult::Program { program, warnings } => { js_sys::Reflect::set( &obj, &JsString::from(JsCompileResult::PROGRAM_PROP), &::from_serde(&program).unwrap(), ) .unwrap(); - js_sys::Reflect::set( &obj, - &JsString::from(JsCompileResult::DEBUG_PROP), - &::from_serde(&debug).unwrap(), + &JsString::from(JsCompileResult::WARNINGS_PROP), + &::from_serde(&warnings).unwrap(), ) .unwrap(); } @@ -120,12 +110,11 @@ impl JsCompileResult { } } -#[derive(Deserialize)] -struct DependencyGraph { - root_dependencies: Vec, - library_dependencies: HashMap>, +#[derive(Deserialize, Default)] +pub(crate) struct DependencyGraph { + pub(crate) root_dependencies: Vec, + pub(crate) library_dependencies: HashMap>, } - #[wasm_bindgen] // This is a map containing the paths of all of the files in the entry-point crate and // the 
transitive dependencies of the entry-point crate. @@ -133,7 +122,7 @@ struct DependencyGraph { // This is for all intents and purposes the file system that the compiler will use to resolve/compile // files in the crate being compiled and its dependencies. #[derive(Deserialize, Default)] -pub struct PathToFileSourceMap(HashMap); +pub struct PathToFileSourceMap(pub(crate) HashMap); #[wasm_bindgen] impl PathToFileSourceMap { @@ -151,9 +140,13 @@ impl PathToFileSourceMap { } } +pub(crate) fn parse_all(fm: &FileManager) -> ParsedFiles { + fm.as_file_map().all_file_ids().map(|&file_id| (file_id, parse_file(fm, file_id))).collect() +} + pub enum CompileResult { - Contract { contract: PreprocessedContract, debug: DebugArtifact }, - Program { program: PreprocessedProgram, debug: DebugArtifact }, + Contract { contract: ContractArtifact, warnings: Vec }, + Program { program: ProgramArtifact, warnings: Vec }, } #[wasm_bindgen] @@ -173,9 +166,8 @@ pub fn compile( }; let fm = file_manager_with_source_map(file_source_map); - - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); + let parsed_files = parse_all(&fm); + let mut context = Context::new(fm, parsed_files); let path = Path::new(&entry_point); let crate_id = prepare_crate(&mut context, path); @@ -184,10 +176,8 @@ pub fn compile( let compile_options = CompileOptions::default(); - // For now we default to plonk width = 3, though we can add it as a parameter - let np_language = acvm::Language::PLONKCSat { width: 3 }; - #[allow(deprecated)] - let is_opcode_supported = acvm::pwg::default_is_opcode_supported(np_language); + // For now we default to a bounded width of 3, though we can add it as a parameter + let expression_width = acvm::ExpressionWidth::Bounded { width: 3 }; if contracts.unwrap_or_default() { let compiled_contract = compile_contract(&mut context, crate_id, &compile_options) @@ -200,14 +190,12 @@ pub fn compile( })? 
.0; - let optimized_contract = - nargo::ops::optimize_contract(compiled_contract, np_language, &is_opcode_supported) - .expect("Contract optimization failed"); + let optimized_contract = nargo::ops::optimize_contract(compiled_contract, expression_width); - let compile_output = preprocess_contract(optimized_contract); + let compile_output = generate_contract_artifact(optimized_contract); Ok(JsCompileResult::new(compile_output)) } else { - let compiled_program = compile_main(&mut context, crate_id, &compile_options, None, true) + let compiled_program = compile_main(&mut context, crate_id, &compile_options, None) .map_err(|errs| { CompileError::with_file_diagnostics( "Failed to compile program", @@ -217,11 +205,9 @@ pub fn compile( })? .0; - let optimized_program = - nargo::ops::optimize_program(compiled_program, np_language, &is_opcode_supported) - .expect("Program optimization failed"); + let optimized_program = nargo::ops::optimize_program(compiled_program, expression_width); - let compile_output = preprocess_program(optimized_program); + let compile_output = generate_program_artifact(optimized_program); Ok(JsCompileResult::new(compile_output)) } } @@ -234,9 +220,9 @@ pub fn compile( // // For all intents and purposes, the file manager being returned // should be considered as immutable. 
-fn file_manager_with_source_map(source_map: PathToFileSourceMap) -> FileManager { +pub(crate) fn file_manager_with_source_map(source_map: PathToFileSourceMap) -> FileManager { let root = Path::new(""); - let mut fm = FileManager::new(root); + let mut fm = file_manager_with_stdlib(root); for (path, source) in source_map.0 { fm.add_file_with_source(path.as_path(), source); @@ -283,73 +269,44 @@ fn add_noir_lib(context: &mut Context, library_name: &CrateName) -> CrateId { prepare_dependency(context, &path_to_lib) } -fn preprocess_program(program: CompiledProgram) -> CompileResult { - let debug_artifact = DebugArtifact { - debug_symbols: vec![program.debug], - file_map: program.file_map, - warnings: program.warnings, - }; - - let preprocessed_program = PreprocessedProgram { - hash: program.hash, - backend: String::from(BACKEND_IDENTIFIER), - abi: program.abi, - noir_version: NOIR_ARTIFACT_VERSION_STRING.to_string(), - bytecode: program.circuit, - }; - - CompileResult::Program { program: preprocessed_program, debug: debug_artifact } +pub(crate) fn generate_program_artifact(program: CompiledProgram) -> CompileResult { + let warnings = program.warnings.clone(); + CompileResult::Program { program: program.into(), warnings } } -fn preprocess_contract(contract: CompiledContract) -> CompileResult { - let debug_artifact = DebugArtifact { - debug_symbols: contract.functions.iter().map(|function| function.debug.clone()).collect(), - file_map: contract.file_map, - warnings: contract.warnings, - }; - let preprocessed_functions = contract - .functions - .into_iter() - .map(|func| PreprocessedContractFunction { - name: func.name, - function_type: func.function_type, - is_internal: func.is_internal, - abi: func.abi, - bytecode: func.bytecode, - }) - .collect(); - - let preprocessed_contract = PreprocessedContract { +pub(crate) fn generate_contract_artifact(contract: CompiledContract) -> CompileResult { + let functions = 
contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(); + + let contract_artifact = ContractArtifact { noir_version: String::from(NOIR_ARTIFACT_VERSION_STRING), name: contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_functions, + functions, events: contract.events, + file_map: contract.file_map, }; - CompileResult::Contract { contract: preprocessed_contract, debug: debug_artifact } + CompileResult::Contract { contract: contract_artifact, warnings: contract.warnings } } #[cfg(test)] mod test { use noirc_driver::prepare_crate; - use noirc_frontend::{ - graph::{CrateGraph, CrateName}, - hir::Context, - }; + use noirc_frontend::{graph::CrateName, hir::Context}; use crate::compile::PathToFileSourceMap; - use super::{file_manager_with_source_map, process_dependency_graph, DependencyGraph}; + use super::{ + file_manager_with_source_map, parse_all, process_dependency_graph, DependencyGraph, + }; use std::{collections::HashMap, path::Path}; - fn setup_test_context(source_map: PathToFileSourceMap) -> Context { + fn setup_test_context(source_map: PathToFileSourceMap) -> Context<'static, 'static> { let mut fm = file_manager_with_source_map(source_map); // Add this due to us calling prepare_crate on "/main.nr" below fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let parsed_files = parse_all(&fm); - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); + let mut context = Context::new(fm, parsed_files); prepare_crate(&mut context, Path::new("/main.nr")); context diff --git a/compiler/wasm/src/compile_new.rs b/compiler/wasm/src/compile_new.rs new file mode 100644 index 00000000000..6476f6d29bc --- /dev/null +++ b/compiler/wasm/src/compile_new.rs @@ -0,0 +1,344 @@ +use crate::compile::{ + file_manager_with_source_map, generate_contract_artifact, generate_program_artifact, parse_all, + JsCompileResult, PathToFileSourceMap, +}; +use crate::errors::{CompileError, 
JsCompileError}; +use noirc_driver::{ + add_dep, compile_contract, compile_main, prepare_crate, prepare_dependency, CompileOptions, +}; +use noirc_frontend::{ + graph::{CrateId, CrateName}, + hir::Context, +}; +use std::path::Path; +use wasm_bindgen::prelude::wasm_bindgen; + +/// This is a wrapper class that is wasm-bindgen compatible +/// We do not use js_name and rename it like CrateId because +/// then the impl block is not picked up in javascript. +#[wasm_bindgen] +pub struct CompilerContext { + // `wasm_bindgen` currently doesn't allow lifetime parameters on structs so we must use a `'static` lifetime. + // `Context` must then own the `FileManager` to satisfy this lifetime. + context: Context<'static, 'static>, +} + +#[wasm_bindgen(js_name = "CrateId")] +#[derive(Debug, Copy, Clone)] +pub struct CrateIDWrapper(CrateId); + +#[wasm_bindgen] +impl CompilerContext { + #[wasm_bindgen(constructor)] + pub fn new(source_map: PathToFileSourceMap) -> CompilerContext { + console_error_panic_hook::set_once(); + + let fm = file_manager_with_source_map(source_map); + let parsed_files = parse_all(&fm); + + CompilerContext { context: Context::new(fm, parsed_files) } + } + + #[cfg(test)] + pub(crate) fn crate_graph(&self) -> &noirc_frontend::graph::CrateGraph { + &self.context.crate_graph + } + #[cfg(test)] + pub(crate) fn root_crate_id(&self) -> CrateIDWrapper { + CrateIDWrapper(*self.context.root_crate_id()) + } + + // Processes the root crate by adding it to the package graph and automatically + // importing the stdlib as a dependency for it. 
+ // + // Its ID in the package graph is returned + pub fn process_root_crate(&mut self, path_to_crate: String) -> CrateIDWrapper { + let path_to_crate = Path::new(&path_to_crate); + + // Adds the root crate to the crate graph and returns its crate id + CrateIDWrapper(prepare_crate(&mut self.context, path_to_crate)) + } + + pub fn process_dependency_crate(&mut self, path_to_crate: String) -> CrateIDWrapper { + let path_to_crate = Path::new(&path_to_crate); + + // Adds the root crate to the crate graph and returns its crate id + CrateIDWrapper(prepare_dependency(&mut self.context, path_to_crate)) + } + + // Adds a named edge from one crate to the other. + // + // For example, lets say we have two crates CrateId1 and CrateId2 + // This function will add an edge from CrateId1 to CrateId2 and the edge will be named `crate_name` + // + // This essentially says that CrateId1 depends on CrateId2 and the dependency is named `crate_name` + // + // We pass references to &CrateIdWrapper even though it is a copy because Rust's move semantics are + // not respected once we use javascript. ie it will actually allocated a new object in javascript + // then deallocate that object if we do not pass as a reference. 
+ pub fn add_dependency_edge( + &mut self, + crate_name: String, + from: &CrateIDWrapper, + to: &CrateIDWrapper, + ) -> Result<(), JsCompileError> { + let parsed_crate_name: CrateName = + crate_name.parse().map_err(|err_string| JsCompileError::new(err_string, Vec::new()))?; + + add_dep(&mut self.context, from.0, to.0, parsed_crate_name); + Ok(()) + } + + pub fn compile_program( + mut self, + program_width: usize, + ) -> Result { + let compile_options = CompileOptions::default(); + let np_language = acvm::ExpressionWidth::Bounded { width: program_width }; + + let root_crate_id = *self.context.root_crate_id(); + + let compiled_program = + compile_main(&mut self.context, root_crate_id, &compile_options, None) + .map_err(|errs| { + CompileError::with_file_diagnostics( + "Failed to compile program", + errs, + &self.context.file_manager, + ) + })? + .0; + + let optimized_program = nargo::ops::optimize_program(compiled_program, np_language); + + let compile_output = generate_program_artifact(optimized_program); + Ok(JsCompileResult::new(compile_output)) + } + + pub fn compile_contract( + mut self, + program_width: usize, + ) -> Result { + let compile_options = CompileOptions::default(); + let np_language = acvm::ExpressionWidth::Bounded { width: program_width }; + let root_crate_id = *self.context.root_crate_id(); + + let compiled_contract = + compile_contract(&mut self.context, root_crate_id, &compile_options) + .map_err(|errs| { + CompileError::with_file_diagnostics( + "Failed to compile contract", + errs, + &self.context.file_manager, + ) + })? 
+ .0; + + let optimized_contract = nargo::ops::optimize_contract(compiled_contract, np_language); + + let compile_output = generate_contract_artifact(optimized_contract); + Ok(JsCompileResult::new(compile_output)) + } +} + +/// This is a method that exposes the same API as `compile` +/// But uses the Context based APi internally +#[wasm_bindgen] +pub fn compile_( + entry_point: String, + contracts: Option, + dependency_graph: Option, + file_source_map: PathToFileSourceMap, +) -> Result { + use std::collections::HashMap; + + console_error_panic_hook::set_once(); + + let dependency_graph: crate::compile::DependencyGraph = + if let Some(dependency_graph) = dependency_graph { + ::into_serde( + &wasm_bindgen::JsValue::from(dependency_graph), + ) + .map_err(|err| err.to_string())? + } else { + crate::compile::DependencyGraph::default() + }; + + let mut compiler_context = CompilerContext::new(file_source_map); + + // Set the root crate + let root_id = compiler_context.process_root_crate(entry_point.clone()); + + let add_noir_lib = |context: &mut CompilerContext, lib_name: &CrateName| -> CrateIDWrapper { + let lib_name_string = lib_name.to_string(); + let path_to_lib = Path::new(&lib_name_string) + .join("lib.nr") + .to_str() + .expect("paths are expected to be valid utf-8") + .to_string(); + context.process_dependency_crate(path_to_lib) + }; + + // Add the dependency graph + let mut crate_names: HashMap = HashMap::new(); + // + // Process the direct dependencies of the root + for lib_name in dependency_graph.root_dependencies { + let lib_name_string = lib_name.to_string(); + + let crate_id = add_noir_lib(&mut compiler_context, &lib_name); + + crate_names.insert(lib_name.clone(), crate_id); + + // Add the dependency edges + compiler_context.add_dependency_edge(lib_name_string, &root_id, &crate_id)?; + } + + // Process the transitive dependencies of the root + for (lib_name, dependencies) in &dependency_graph.library_dependencies { + // first create the library crate if 
needed + // this crate might not have been registered yet because of the order of the HashMap + // e.g. {root: [lib1], libs: { lib2 -> [lib3], lib1 -> [lib2] }} + let crate_id = *crate_names + .entry(lib_name.clone()) + .or_insert_with(|| add_noir_lib(&mut compiler_context, lib_name)); + + for dependency_name in dependencies { + let dependency_name_string = dependency_name.to_string(); + + let dep_crate_id = crate_names + .entry(dependency_name.clone()) + .or_insert_with(|| add_noir_lib(&mut compiler_context, dependency_name)); + + compiler_context.add_dependency_edge( + dependency_name_string, + &crate_id, + dep_crate_id, + )?; + } + } + + let is_contract = contracts.unwrap_or(false); + let program_width = 3; + + if is_contract { + compiler_context.compile_contract(program_width) + } else { + compiler_context.compile_program(program_width) + } +} + +#[cfg(test)] +mod test { + use noirc_driver::prepare_crate; + use noirc_frontend::hir::Context; + + use crate::compile::{file_manager_with_source_map, parse_all, PathToFileSourceMap}; + + use std::path::Path; + + use super::CompilerContext; + + fn setup_test_context(source_map: PathToFileSourceMap) -> CompilerContext { + let mut fm = file_manager_with_source_map(source_map); + // Add this due to us calling prepare_crate on "/main.nr" below + fm.add_file_with_source(Path::new("/main.nr"), "fn foo() {}".to_string()); + let parsed_files = parse_all(&fm); + + let mut context = Context::new(fm, parsed_files); + prepare_crate(&mut context, Path::new("/main.nr")); + + CompilerContext { context } + } + + #[test] + fn test_works_with_empty_dependency_graph() { + let source_map = PathToFileSourceMap::default(); + let context = setup_test_context(source_map); + + // one stdlib + one root crate + assert_eq!(context.crate_graph().number_of_crates(), 2); + } + + #[test] + fn test_works_with_root_dependencies() { + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + 
.into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); + context.process_dependency_crate("lib1/lib.nr".to_string()); + + assert_eq!(context.crate_graph().number_of_crates(), 3); + } + + #[test] + fn test_works_with_duplicate_root_dependencies() { + let source_map = PathToFileSourceMap( + vec![(Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string())] + .into_iter() + .collect(), + ); + let mut context = setup_test_context(source_map); + + let lib1_crate_id = context.process_dependency_crate("lib1/lib.nr".to_string()); + let root_crate_id = context.root_crate_id(); + + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + + assert_eq!(context.crate_graph().number_of_crates(), 3); + } + + #[test] + fn test_works_with_transitive_dependencies() { + let source_map = PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + + let mut context = setup_test_context(source_map); + + let lib1_crate_id = context.process_dependency_crate("lib1/lib.nr".to_string()); + let lib2_crate_id = context.process_dependency_crate("lib2/lib.nr".to_string()); + let lib3_crate_id = context.process_dependency_crate("lib3/lib.nr".to_string()); + let root_crate_id = context.root_crate_id(); + + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + context.add_dependency_edge("lib2".to_string(), &lib1_crate_id, &lib2_crate_id).unwrap(); + context.add_dependency_edge("lib3".to_string(), &lib2_crate_id, &lib3_crate_id).unwrap(); + + assert_eq!(context.crate_graph().number_of_crates(), 5); + } + + #[test] + fn test_works_with_missing_dependencies() { + let source_map = 
PathToFileSourceMap( + vec![ + (Path::new("lib1/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib2/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + (Path::new("lib3/lib.nr").to_path_buf(), "fn foo() {}".to_string()), + ] + .into_iter() + .collect(), + ); + let mut context = setup_test_context(source_map); + + let lib1_crate_id = context.process_dependency_crate("lib1/lib.nr".to_string()); + let lib2_crate_id = context.process_dependency_crate("lib2/lib.nr".to_string()); + let lib3_crate_id = context.process_dependency_crate("lib3/lib.nr".to_string()); + let root_crate_id = context.root_crate_id(); + + context.add_dependency_edge("lib1".to_string(), &root_crate_id, &lib1_crate_id).unwrap(); + context.add_dependency_edge("lib3".to_string(), &lib2_crate_id, &lib3_crate_id).unwrap(); + + assert_eq!(context.crate_graph().number_of_crates(), 5); + } +} diff --git a/compiler/wasm/src/errors.rs b/compiler/wasm/src/errors.rs index 9aafcadc27f..ef56dcfc911 100644 --- a/compiler/wasm/src/errors.rs +++ b/compiler/wasm/src/errors.rs @@ -124,7 +124,10 @@ impl CompileError { let diagnostics: Vec<_> = file_diagnostics .iter() .map(|err| { - Diagnostic::new(err, file_manager.path(err.file_id).to_str().unwrap().to_string()) + let file_path = file_manager + .path(err.file_id) + .expect("File must exist to have caused diagnostics"); + Diagnostic::new(err, file_path.to_str().unwrap().to_string()) }) .collect(); diff --git a/compiler/wasm/src/index.cts b/compiler/wasm/src/index.cts new file mode 100644 index 00000000000..14687e615df --- /dev/null +++ b/compiler/wasm/src/index.cts @@ -0,0 +1,50 @@ +import { FileManager } from './noir/file-manager/file-manager'; +import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; +import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; +import { LogData, LogFn } from './utils'; +import { CompilationResult } from './types/noir_artifact'; +import { inflateDebugSymbols } from './noir/debug'; + 
+async function compile( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + if (logFn && !debugLogFn) { + debugLogFn = logFn; + } + + const cjs = await require('../build/cjs'); + const compiler = await NoirWasmCompiler.new( + fileManager, + projectPath ?? fileManager.getDataDir(), + cjs, + new cjs.PathToFileSourceMap(), + { + log: + logFn ?? + function (msg: string, data?: LogData) { + if (data) { + console.log(msg, data); + } else { + console.log(msg); + } + }, + debugLog: + debugLogFn ?? + function (msg: string, data?: LogData) { + if (data) { + console.debug(msg, data); + } else { + console.debug(msg); + } + }, + }, + ); + return await compiler.compile(); +} + +const createFileManager = createNodejsFileManager; + +export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; diff --git a/compiler/wasm/src/index.mts b/compiler/wasm/src/index.mts new file mode 100644 index 00000000000..8774a7857ef --- /dev/null +++ b/compiler/wasm/src/index.mts @@ -0,0 +1,52 @@ +import { FileManager } from './noir/file-manager/file-manager'; +import { createNodejsFileManager } from './noir/file-manager/nodejs-file-manager'; +import { NoirWasmCompiler } from './noir/noir-wasm-compiler'; +import { LogData, LogFn } from './utils'; +import { CompilationResult } from './types/noir_artifact'; +import { inflateDebugSymbols } from './noir/debug'; + +async function compile( + fileManager: FileManager, + projectPath?: string, + logFn?: LogFn, + debugLogFn?: LogFn, +): Promise { + if (logFn && !debugLogFn) { + debugLogFn = logFn; + } + + const esm = await import(/* webpackMode: "eager" */ '../build/esm'); + await esm.default(); + + const compiler = await NoirWasmCompiler.new( + fileManager, + projectPath ?? fileManager.getDataDir(), + esm, + new esm.PathToFileSourceMap(), + { + log: + logFn ?? 
+ function (msg: string, data?: LogData) { + if (data) { + console.log(msg, data); + } else { + console.log(msg); + } + }, + debugLog: + debugLogFn ?? + function (msg: string, data?: LogData) { + if (data) { + console.debug(msg, data); + } else { + console.debug(msg); + } + }, + }, + ); + return await compiler.compile(); +} + +const createFileManager = createNodejsFileManager; + +export { compile, createFileManager, inflateDebugSymbols, CompilationResult }; diff --git a/compiler/wasm/src/lib.rs b/compiler/wasm/src/lib.rs index 9f2f558f85c..6d737a0ea6d 100644 --- a/compiler/wasm/src/lib.rs +++ b/compiler/wasm/src/lib.rs @@ -4,21 +4,26 @@ // See Cargo.toml for explanation. use getrandom as _; +use rust_embed as _; use gloo_utils::format::JsValueSerdeExt; -use log::Level; + use noirc_driver::{GIT_COMMIT, GIT_DIRTY, NOIRC_VERSION}; use serde::{Deserialize, Serialize}; -use std::str::FromStr; -use wasm_bindgen::prelude::*; +use tracing_subscriber::prelude::*; +use tracing_subscriber::EnvFilter; +use tracing_web::MakeWebConsoleWriter; -mod circuit; mod compile; +mod compile_new; mod errors; -pub use circuit::{acir_read_bytes, acir_write_bytes}; pub use compile::compile; +// Expose the new Context-Centric API +pub use compile_new::{compile_, CompilerContext, CrateIDWrapper}; +use wasm_bindgen::{prelude::wasm_bindgen, JsValue}; + #[derive(Serialize, Deserialize)] pub struct BuildInfo { git_hash: &'static str, @@ -27,14 +32,21 @@ pub struct BuildInfo { } #[wasm_bindgen] -pub fn init_log_level(level: String) { +pub fn init_log_level(filter: String) { // Set the static variable from Rust use std::sync::Once; - let log_level = Level::from_str(&level).unwrap_or(Level::Error); + let filter: EnvFilter = + filter.parse().expect("Could not parse log filter while initializing logger"); + static SET_HOOK: Once = Once::new(); SET_HOOK.call_once(|| { - wasm_logger::init(wasm_logger::Config::new(log_level)); + let fmt_layer = tracing_subscriber::fmt::layer() + .with_ansi(false) + 
.without_time() + .with_writer(MakeWebConsoleWriter::new()); + + tracing_subscriber::registry().with(fmt_layer.with_filter(filter)).init(); }); } diff --git a/compiler/wasm/src/noir/debug.ts b/compiler/wasm/src/noir/debug.ts new file mode 100644 index 00000000000..7a65f4b68c2 --- /dev/null +++ b/compiler/wasm/src/noir/debug.ts @@ -0,0 +1,6 @@ +import { inflate } from 'pako'; + +/** Decompresses and decodes the debug symbols */ +export function inflateDebugSymbols(debugSymbols: string) { + return JSON.parse(inflate(Buffer.from(debugSymbols, 'base64'), { to: 'string', raw: true })); +} diff --git a/compiler/wasm/src/noir/dependencies/dependency-manager.ts b/compiler/wasm/src/noir/dependencies/dependency-manager.ts new file mode 100644 index 00000000000..944ec0070c5 --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/dependency-manager.ts @@ -0,0 +1,150 @@ +import { join } from 'path'; + +import { Package } from '../package'; +import { Dependency, DependencyResolver } from './dependency-resolver'; +import { DependencyConfig } from '../../types/noir_package_config'; +import { LogData, LogFn } from '../../utils'; + +/** + * Noir Dependency Resolver + */ +export class DependencyManager { + #entryPoint: Package; + #libraries = new Map(); + #dependencies = new Map(); + #log: LogFn; + #resolvers: readonly DependencyResolver[]; + + /** + * Creates a new dependency resolver + * @param resolvers - A list of dependency resolvers to use + * @param entryPoint - The entry point of the project + */ + constructor(resolvers: readonly DependencyResolver[] = [], entryPoint: Package) { + this.#resolvers = resolvers; + this.#entryPoint = entryPoint; + this.#log = (msg: string, _data?: LogData) => { + console.log(msg); + }; + } + + /** + * Gets dependencies for the entry point + */ + public getEntrypointDependencies() { + return this.#dependencies.get('') ?? 
[]; + } + + /** + * Get transitive libraries used by the package + */ + public getLibraries() { + return Array.from(this.#libraries.entries()); + } + + /** + * A map of library dependencies + */ + public getLibraryDependencies() { + const entries = Array.from(this.#dependencies.entries()); + return Object.fromEntries(entries.filter(([name]) => name !== '')); + } + + /** + * Resolves dependencies for a package. + */ + public async resolveDependencies(): Promise { + await this.#breadthFirstResolveDependencies(); + } + + /** + * Gets the version of a dependency in the dependency tree + * @param name - Dependency name + * @returns The dependency's version + */ + public getVersionOf(name: string): string | undefined { + const dep = this.#libraries.get(name); + return dep?.version; + } + + async #breadthFirstResolveDependencies(): Promise { + /** Represents a package to resolve dependencies for */ + type Job = { + /** Package name */ + packageName: string; + /** The package location */ + noirPackage: Package; + }; + + const queue: Job[] = [ + { + packageName: '', + noirPackage: this.#entryPoint, + }, + ]; + + while (queue.length > 0) { + const { packageName, noirPackage } = queue.shift()!; + for (const [name, config] of Object.entries(noirPackage.getDependencies())) { + // TODO what happens if more than one package has the same name but different versions? + if (this.#libraries.has(name)) { + this.#log(`skipping already resolved dependency ${name}`); + this.#dependencies.set(packageName, [...(this.#dependencies.get(packageName) ?? []), name]); + + continue; + } + const dependency = await this.#resolveDependency(noirPackage, config); + if (dependency.package.getType() !== 'lib') { + this.#log(`Non-library package ${name}`, config); + throw new Error(`Dependency ${name} is not a library`); + } + + this.#libraries.set(name, dependency); + this.#dependencies.set(packageName, [...(this.#dependencies.get(packageName) ?? 
[]), name]); + + queue.push({ + noirPackage: dependency.package, + packageName: name, + }); + } + } + } + + async #resolveDependency(pkg: Package, config: DependencyConfig): Promise { + let dependency: Dependency | null = null; + for (const resolver of this.#resolvers) { + dependency = await resolver.resolveDependency(pkg, config); + if (dependency) { + break; + } + } + + if (!dependency) { + throw new Error('Dependency not resolved'); + } + + return dependency; + } + + /** + * Gets the names of the crates in this dependency list + */ + public getPackageNames() { + return [...this.#libraries.keys()]; + } + + /** + * Looks up a dependency + * @param sourceId - The source being resolved + * @returns The path to the resolved file + */ + public findFile(sourceId: string): string | null { + const [lib, ...path] = sourceId.split('/').filter((x) => x); + const dep = this.#libraries.get(lib); + if (dep) { + return join(dep.package.getSrcPath(), ...path); + } else { + return null; + } + } +} diff --git a/compiler/wasm/src/noir/dependencies/dependency-resolver.ts b/compiler/wasm/src/noir/dependencies/dependency-resolver.ts new file mode 100644 index 00000000000..266d2075e1e --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/dependency-resolver.ts @@ -0,0 +1,24 @@ +import { DependencyConfig } from '../../types/noir_package_config'; +import { Package } from '../package'; + +/** + * A Noir dependency + */ +export type Dependency = { + /** version string as determined by the resolver */ + version?: string; + /** the actual package source code */ + package: Package; +}; + +/** + * Resolves a dependency for a package. + */ +export interface DependencyResolver { + /** + * Resolve a dependency for a package. 
+ * @param pkg - The package to resolve dependencies for + * @param dep - The dependency config to resolve + */ + resolveDependency(pkg: Package, dep: DependencyConfig): Promise; +} diff --git a/compiler/wasm/src/noir/dependencies/github-dependency-resolver.ts b/compiler/wasm/src/noir/dependencies/github-dependency-resolver.ts new file mode 100644 index 00000000000..8b08b6f0dd8 --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/github-dependency-resolver.ts @@ -0,0 +1,145 @@ +import { delimiter, join, sep } from 'path'; +import { unzip } from 'unzipit'; + +import { FileManager } from '../file-manager/file-manager'; +import { Package } from '../package'; +import { Dependency, DependencyResolver } from './dependency-resolver'; +import { DependencyConfig, GitDependencyConfig } from '../../types/noir_package_config'; +import { LogData } from '../../utils'; + +/** + * Downloads dependencies from github + */ +export class GithubDependencyResolver implements DependencyResolver { + #fm: FileManager; + #log; + + constructor(fm: FileManager) { + this.#fm = fm; + this.#log = (msg: string, _data?: LogData) => { + console.log(msg); + }; + } + + /** + * Resolves a dependency from github. Returns null if URL is for a different website. + * @param _pkg - The package to resolve the dependency for + * @param dependency - The dependency configuration + * @returns asd + */ + async resolveDependency(_pkg: Package, dependency: DependencyConfig): Promise { + // TODO accept ssh urls? + // TODO github authentication? 
+ if (!('git' in dependency) || !dependency.git.startsWith('https://github.com')) { + return null; + } + + const archivePath = await this.#fetchZipFromGithub(dependency); + const libPath = await this.#extractZip(dependency, archivePath); + return { + version: dependency.tag, + package: await Package.open(libPath, this.#fm), + }; + } + + async #fetchZipFromGithub(dependency: Pick): Promise { + if (!dependency.git.startsWith('https://github.com')) { + throw new Error('Only github dependencies are supported'); + } + + const url = resolveGithubCodeArchive(dependency, 'zip'); + const localArchivePath = join('archives', safeFilename(url.pathname)); + + // TODO should check signature before accepting any file + if (this.#fm.hasFileSync(localArchivePath)) { + this.#log('using cached archive', { url: url.href, path: localArchivePath }); + return localArchivePath; + } + + const response = await fetch(url, { + method: 'GET', + }); + + if (!response.ok || !response.body) { + throw new Error(`Failed to fetch ${url}: ${response.statusText}`); + } + + const tmpFile = localArchivePath + '.tmp'; + await this.#fm.writeFile(tmpFile, response.body); + await this.#fm.moveFile(tmpFile, localArchivePath); + + return localArchivePath; + } + + async #extractZip(dependency: GitDependencyConfig, archivePath: string): Promise { + const gitUrl = new URL(dependency.git); + // extract the archive to this location + const extractLocation = join('libs', safeFilename(gitUrl.pathname + '@' + (dependency.tag ?? 'HEAD'))); + + // where we expect to find this package after extraction + // it might already exist if the archive got unzipped previously + const packagePath = join(extractLocation, dependency.directory ?? 
''); + + if (this.#fm.hasFileSync(packagePath)) { + return packagePath; + } + + const { entries } = await unzip(await this.#fm.readFile(archivePath)); + + // extract to a temporary directory, then move it to the final location + // TODO empty the temp directory first + const tmpExtractLocation = extractLocation + '.tmp'; + for (const entry of Object.values(entries)) { + if (entry.isDirectory) { + continue; + } + + // remove the first path segment, because it'll be the archive name + const name = stripSegments(entry.name, 1); + const path = join(tmpExtractLocation, name); + await this.#fm.writeFile(path, (await entry.blob()).stream()); + } + + await this.#fm.moveFile(tmpExtractLocation, extractLocation); + + return packagePath; + } +} + +/** + * Strips the first n segments from a path + */ +function stripSegments(path: string, count: number): string { + const segments = path.split(sep).filter(Boolean); + return segments.slice(count).join(sep); +} + +/** + * Returns a safe filename for a value + * @param val - The value to convert + */ +export function safeFilename(val: string): string { + if (!val) { + throw new Error('invalid value'); + } + + return val.replaceAll(sep, '_').replaceAll(delimiter, '_').replace(/^_+/, ''); +} + +/** + * Resolves a dependency's archive URL. + * @param dependency - The dependency configuration + * @returns The URL to the library archive + */ +export function resolveGithubCodeArchive(dependency: GitDependencyConfig, format: 'zip' | 'tar'): URL { + const gitUrl = new URL(dependency.git); + const [owner, repo] = gitUrl.pathname.slice(1).split('/'); + const ref = dependency.tag ?? 'HEAD'; + const extension = format === 'zip' ? 
'zip' : 'tar.gz'; + + if (!owner || !repo || gitUrl.hostname !== 'github.com') { + throw new Error('Invalid Github repository URL'); + } + + return new URL(`https://github.com/${owner}/${repo}/archive/${ref}.${extension}`); +} diff --git a/compiler/wasm/src/noir/dependencies/local-dependency-resolver.ts b/compiler/wasm/src/noir/dependencies/local-dependency-resolver.ts new file mode 100644 index 00000000000..50338421143 --- /dev/null +++ b/compiler/wasm/src/noir/dependencies/local-dependency-resolver.ts @@ -0,0 +1,31 @@ +import { isAbsolute, join } from 'path'; + +import { FileManager } from '../file-manager/file-manager'; +import { Package } from '../package'; +import { Dependency, DependencyResolver } from './dependency-resolver'; +import { DependencyConfig } from '../../types/noir_package_config'; + +/** + * Resolves dependencies on-disk, relative to current package + */ +export class LocalDependencyResolver implements DependencyResolver { + #fm: FileManager; + + constructor(fm: FileManager) { + this.#fm = fm; + } + + async resolveDependency(parent: Package, config: DependencyConfig): Promise { + if ('path' in config) { + const parentPath = parent.getPackagePath(); + const dependencyPath = isAbsolute(config.path) ? config.path : join(parentPath, config.path); + return { + // unknown version, Nargo.toml doesn't have a version field + version: undefined, + package: await Package.open(dependencyPath, this.#fm), + }; + } else { + return null; + } + } +} diff --git a/compiler/wasm/src/noir/file-manager/file-manager.ts b/compiler/wasm/src/noir/file-manager/file-manager.ts new file mode 100644 index 00000000000..297e9f541e6 --- /dev/null +++ b/compiler/wasm/src/noir/file-manager/file-manager.ts @@ -0,0 +1,163 @@ +import { dirname, isAbsolute, join } from 'path'; + +/** + * A file system interface that matches the node fs module. 
+ */ +export interface FileSystem { + /** Checks if the file exists */ + existsSync: (path: string) => boolean; + /** Creates a directory structure */ + mkdir: ( + dir: string, + opts?: { + /** Create parent directories as needed */ + recursive: boolean; + }, + ) => Promise; + /** Writes a file */ + writeFile: (path: string, data: Uint8Array) => Promise; + /** Reads a file */ + readFile: (path: string, encoding?: 'utf-8') => Promise; + /** Renames a file */ + rename: (oldPath: string, newPath: string) => Promise; + /** Reads a directory */ + readdir: ( + path: string, + options?: { + /** Traverse child directories recursively */ + recursive: boolean; + }, + ) => Promise; +} + +/** + * A file manager that writes file to a specific directory but reads globally. + */ +export class FileManager { + #fs: FileSystem; + #dataDir: string; + + constructor(fs: FileSystem, dataDir: string) { + this.#fs = fs; + this.#dataDir = dataDir; + } + + /** + * Returns the data directory + */ + getDataDir() { + return this.#dataDir; + } + + /** + * Saves a file to the data directory. 
+ * @param name - File to save + * @param stream - File contents + */ + public async writeFile(name: string, stream: ReadableStream): Promise { + if (isAbsolute(name)) { + throw new Error("can't write absolute path"); + } + + const path = this.#getPath(name); + const chunks: Uint8Array[] = []; + const reader = stream.getReader(); + + // eslint-disable-next-line no-constant-condition + while (true) { + const { done, value } = await reader.read(); + if (done) { + break; + } + + chunks.push(value); + } + + const file = new Uint8Array(chunks.reduce((acc, chunk) => acc + chunk.length, 0)); + let offset = 0; + for (const chunk of chunks) { + file.set(chunk, offset); + offset += chunk.length; + } + + await this.#fs.mkdir(dirname(path), { recursive: true }); + await this.#fs.writeFile(this.#getPath(path), file); + } + + /** + * Reads a file from the filesystem and returns a buffer + * Saves a file to the data directory. + * @param oldName - File to save + * @param newName - File contents + */ + async moveFile(oldName: string, newName: string) { + if (isAbsolute(oldName) || isAbsolute(newName)) { + throw new Error("can't move absolute path"); + } + + const oldPath = this.#getPath(oldName); + const newPath = this.#getPath(newName); + + await this.#fs.mkdir(dirname(newPath), { recursive: true }); + await this.#fs.rename(oldPath, newPath); + } + + /** + * Reads a file from the disk and returns a buffer + * @param name - File to read + */ + public async readFile(name: string): Promise; + /** + * Reads a file from the filesystem as a string + * @param name - File to read + * @param encoding - Encoding to use + */ + public async readFile(name: string, encoding: 'utf-8'): Promise; + /** + * Reads a file from the filesystem + * @param name - File to read + * @param encoding - Encoding to use + */ + public async readFile(name: string, encoding?: 'utf-8'): Promise { + const path = this.#getPath(name); + const data = await this.#fs.readFile(path, encoding); + + if (!encoding) { + 
return typeof data === 'string' + ? new TextEncoder().encode(data) // this branch shouldn't be hit, but just in case + : new Uint8Array(data.buffer, data.byteOffset, data.byteLength / Uint8Array.BYTES_PER_ELEMENT); + } + + return data; + } + + /** + * Checks if a file exists and is accessible + * @param name - File to check + */ + public hasFileSync(name: string): boolean { + return this.#fs.existsSync(this.#getPath(name)); + } + + #getPath(name: string) { + return isAbsolute(name) ? name : join(this.#dataDir, name); + } + + /** + * Reads a file from the filesystem + * @param dir - File to read + * @param options - Readdir options + */ + public async readdir( + dir: string, + options?: { + /** + * Traverse child directories recursively + */ + recursive: boolean; + }, + ) { + const dirPath = this.#getPath(dir); + return await this.#fs.readdir(dirPath, options); + } +} diff --git a/compiler/wasm/src/noir/file-manager/memfs-file-manager.ts b/compiler/wasm/src/noir/file-manager/memfs-file-manager.ts new file mode 100644 index 00000000000..b9ee3226be2 --- /dev/null +++ b/compiler/wasm/src/noir/file-manager/memfs-file-manager.ts @@ -0,0 +1,58 @@ +import { IFs, fs } from 'memfs'; +import { IDirent } from 'memfs/lib/node/types/misc'; + +import { FileManager } from './file-manager'; + +/** + * Creates a new FileManager instance based on a MemFS instance + * @param memFS - the memfs backing instance + * @param dataDir - where to store files + */ +export function createMemFSFileManager(memFS: IFs = fs, dataDir = '/'): FileManager { + const readdirRecursive = async (dir: string): Promise => { + const contents = await memFS.promises.readdir(dir); + let files: string[] = []; + for (const handle in contents) { + if ((handle as unknown as IDirent).isFile()) { + files.push(handle.toString()); + } else { + files = files.concat(await readdirRecursive(handle.toString())); + } + } + return files; + }; + return new FileManager( + { + existsSync: memFS.existsSync.bind(memFS), + mkdir: 
async ( + dir: string, + options?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + await memFS.promises.mkdir(dir, options); + }, + writeFile: memFS.promises.writeFile.bind(memFS), + rename: memFS.promises.rename.bind(memFS), + readFile: memFS.promises.readFile.bind(memFS), + readdir: async ( + dir: string, + options?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + if (options?.recursive) { + return readdirRecursive(dir); + } + return (await memFS.promises.readdir(dir)).map((handles) => handles.toString()); + }, + }, + dataDir, + ); +} diff --git a/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts b/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts new file mode 100644 index 00000000000..1a8250f49cc --- /dev/null +++ b/compiler/wasm/src/noir/file-manager/nodejs-file-manager.ts @@ -0,0 +1,62 @@ +import { existsSync } from 'fs'; +import { promises as fs } from 'fs'; + +import { FileManager } from './file-manager'; + +// This is needed because memfs doesn't support the recursive flag yet +export async function readdirRecursive(dir: string): Promise { + const contents = await fs.readdir(dir); + let files: string[] = []; + for (const handle of contents) { + if ((await fs.stat(`${dir}/${handle}`)).isFile()) { + files.push(`${dir}/${handle.toString()}`); + } else { + files = files.concat(await readdirRecursive(`${dir}/${handle.toString()}`)); + } + } + return files; +} + +/** + * Creates a new FileManager instance based on nodejs fs + * @param dataDir - where to store files + */ +export function createNodejsFileManager(dataDir: string): FileManager { + return new FileManager( + { + ...fs, + ...{ + // ExistsSync is not available in the fs/promises module + existsSync, + // This is added here because the node types are not compatible with the FileSystem type for mkdir + // Typescripts tries to use a different variant of the function that is not the one that has the optional options. 
+ mkdir: async ( + dir: string, + opts?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + await fs.mkdir(dir, opts); + }, + readdir: async ( + dir: string, + options?: { + /** + * Traverse child directories + */ + recursive: boolean; + }, + ) => { + if (options?.recursive) { + return readdirRecursive(dir); + } + return (await fs.readdir(dir)).map((handles) => handles.toString()); + }, + }, + }, + dataDir, + ); +} diff --git a/compiler/wasm/src/noir/noir-wasm-compiler.ts b/compiler/wasm/src/noir/noir-wasm-compiler.ts new file mode 100644 index 00000000000..2a0af5d8fee --- /dev/null +++ b/compiler/wasm/src/noir/noir-wasm-compiler.ts @@ -0,0 +1,173 @@ +import { isAbsolute } from 'path'; + +import { DependencyManager } from './dependencies/dependency-manager'; +import { GithubDependencyResolver as GithubCodeArchiveDependencyResolver } from './dependencies/github-dependency-resolver'; +import { LocalDependencyResolver } from './dependencies/local-dependency-resolver'; +import { FileManager } from './file-manager/file-manager'; +import { Package } from './package'; +import { LogFn } from '../utils'; +import { CompilationResult } from '../types/noir_artifact'; + +/** Compilation options */ +export type NoirWasmCompileOptions = { + /** Logging function */ + log: LogFn; + /** Log debugging information through this function */ + debugLog: LogFn; +}; + +/** + * Noir Package Compiler + */ +export class NoirWasmCompiler { + #log: LogFn; + #debugLog: LogFn; + #package: Package; + /* eslint-disable @typescript-eslint/no-explicit-any */ + #wasmCompiler: any; + #sourceMap: any; + /* eslint-disable @typescript-eslint/no-explicit-any */ + #fm: FileManager; + #dependencyManager: DependencyManager; + + private constructor( + entrypoint: Package, + dependencyManager: DependencyManager, + fileManager: FileManager, + wasmCompiler: unknown, + sourceMap: unknown, + opts: NoirWasmCompileOptions, + ) { + this.#log = opts.log; + this.#debugLog = opts.debugLog; 
+ this.#package = entrypoint; + this.#fm = fileManager; + this.#wasmCompiler = wasmCompiler; + this.#sourceMap = sourceMap; + this.#dependencyManager = dependencyManager; + } + + /** + * Creates a new compiler instance. + * @param fileManager - The file manager to use + * @param projectPath - The path to the project + * @param opts - Compilation options + */ + public static async new( + fileManager: FileManager, + projectPath: string, + /* eslint-disable @typescript-eslint/no-explicit-any */ + wasmCompiler: any, + sourceMap: any, + /* eslint-enable @typescript-eslint/no-explicit-any */ + opts: NoirWasmCompileOptions, + ) { + // Assume the filemanager is initialized at the project root + if (!isAbsolute(projectPath)) { + throw new Error('projectPath must be an absolute path'); + } + + const noirPackage = await Package.open(projectPath, fileManager); + + const dependencyManager = new DependencyManager( + [ + new LocalDependencyResolver(fileManager), + new GithubCodeArchiveDependencyResolver(fileManager), + // TODO support actual Git repositories + ], + noirPackage, + ); + + return new NoirWasmCompiler(noirPackage, dependencyManager, fileManager, wasmCompiler, sourceMap, opts); + } + + /** + * Compile EntryPoint + */ + /** + * Compile EntryPoint + */ + public async compile(): Promise { + console.log(`Compiling at ${this.#package.getEntryPointPath()}`); + + if (!(this.#package.getType() === 'contract' || this.#package.getType() === 'bin')) { + throw new Error(`Only supports compiling "contract" and "bin" package types (${this.#package.getType()})`); + } + await this.#dependencyManager.resolveDependencies(); + this.#debugLog(`Dependencies: ${this.#dependencyManager.getPackageNames().join(', ')}`); + + try { + const isContract: boolean = this.#package.getType() === 'contract'; + + const entrypoint = this.#package.getEntryPointPath(); + const deps = { + /* eslint-disable camelcase */ + root_dependencies: this.#dependencyManager.getEntrypointDependencies(), + 
library_dependencies: this.#dependencyManager.getLibraryDependencies(), + /* eslint-enable camelcase */ + }; + const packageSources = await this.#package.getSources(this.#fm); + const librarySources = ( + await Promise.all( + this.#dependencyManager + .getLibraries() + .map(async ([alias, library]) => await library.package.getSources(this.#fm, alias)), + ) + ).flat(); + [...packageSources, ...librarySources].forEach((sourceFile) => { + this.#debugLog(`Adding source ${sourceFile.path}`); + this.#sourceMap.add_source_code(sourceFile.path, sourceFile.source); + }); + const result = this.#wasmCompiler.compile(entrypoint, isContract, deps, this.#sourceMap); + + if ((isContract && !('contract' in result)) || (!isContract && !('program' in result))) { + throw new Error('Invalid compilation result'); + } + + return result; + } catch (err) { + if (err instanceof Error && err.name === 'CompileError') { + const logs = await this.#processCompileError(err); + for (const log of logs) { + this.#log(log); + } + throw new Error(logs.join('\n')); + } + + throw err; + } + } + + async #resolveFile(path: string) { + try { + const libFile = this.#dependencyManager.findFile(path); + return await this.#fm.readFile(libFile ?? 
path, 'utf-8'); + } catch (err) { + return ''; + } + } + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + async #processCompileError(err: any): Promise { + const logs = []; + for (const diag of err.diagnostics) { + logs.push(` ${diag.message}`); + const contents = await this.#resolveFile(diag.file); + const lines = contents.split('\n'); + const lineOffsets = lines.reduce((accum, _, idx) => { + if (idx === 0) { + accum.push(0); + } else { + accum.push(accum[idx - 1] + lines[idx - 1].length + 1); + } + return accum; + }, []); + + for (const secondary of diag.secondaries) { + const errorLine = lineOffsets.findIndex((offset) => offset > secondary.start); + logs.push(` ${diag.file}:${errorLine}: ${contents.slice(secondary.start, secondary.end)}`); + } + } + return logs; + } +} diff --git a/compiler/wasm/src/noir/package.ts b/compiler/wasm/src/noir/package.ts new file mode 100644 index 00000000000..a2496a03b3a --- /dev/null +++ b/compiler/wasm/src/noir/package.ts @@ -0,0 +1,129 @@ +import { parse } from '@ltd/j-toml'; +import { join } from 'path'; + +import { FileManager } from './file-manager/file-manager'; +import { DependencyConfig, PackageConfig, parseNoirPackageConfig } from '../types/noir_package_config'; + +const CONFIG_FILE_NAME = 'Nargo.toml'; +const SOURCE_EXTENSIONS = ['.nr']; + +/** + * An array of sources for a package + */ +type SourceList = Array<{ + /** + * The source path, taking into account modules and aliases. Eg: mylib/mod/mysource.nr + */ + path: string; + /** + * Resolved source plaintext + */ + source: string; +}>; + +/** + * A Noir package. + */ +export class Package { + #packagePath: string; + #srcPath: string; + #config: PackageConfig; + #version: string | null = null; + + public constructor(path: string, srcDir: string, config: PackageConfig) { + this.#packagePath = path; + this.#srcPath = srcDir; + this.#config = config; + } + + /** + * Gets this package's path. 
+ */ + public getPackagePath() { + return this.#packagePath; + } + + /** + * Gets this package's Nargo.toml (NoirPackage)Config. + */ + public getPackageConfig() { + return this.#config; + } + + /** + * The path to the source directory. + */ + public getSrcPath() { + return this.#srcPath; + } + + /** + * Gets the entrypoint path for this package. + */ + public getEntryPointPath(): string { + let entrypoint: string; + + switch (this.getType()) { + case 'lib': + // we shouldn't need to compile `lib` type, since the .nr source is read directly + // when the lib is used as a dependency elsewhere. + entrypoint = 'lib.nr'; + break; + case 'contract': + case 'bin': + entrypoint = 'main.nr'; + break; + default: + throw new Error(`Unknown package type: ${this.getType()}`); + } + // TODO check that `src` exists + return join(this.#srcPath, entrypoint); + } + + /** + * Gets the project type + */ + public getType() { + return this.#config.package.type; + } + + /** + * Gets this package's dependencies. + */ + public getDependencies(): Record { + return this.#config.dependencies; + } + + /** + * Gets this package's sources. + * @param fm - A file manager to use + * @param alias - An alias for the sources, if this package is a dependency + */ + public async getSources(fm: FileManager, alias?: string): Promise { + const handles = await fm.readdir(this.#srcPath, { recursive: true }); + return Promise.all( + handles + .filter((handle) => SOURCE_EXTENSIONS.find((ext) => handle.endsWith(ext))) + .map(async (file) => { + const suffix = file.replace(this.#srcPath, ''); + return { + path: this.getType() === 'lib' ? `${alias ? alias : this.#config.package.name}${suffix}` : file, + source: (await fm.readFile(file, 'utf-8')).toString(), + }; + }), + ); + } + + /** + * Opens a path on the filesystem. + * @param path - Path to the package. + * @param fm - A file manager to use. 
+ * @returns The Noir package at the given location + */ + public static async open(path: string, fm: FileManager): Promise { + const fileContents = await fm.readFile(join(path, CONFIG_FILE_NAME), 'utf-8'); + const config = parseNoirPackageConfig(parse(fileContents)); + + return new Package(path, join(path, 'src'), config); + } +} diff --git a/compiler/wasm/src/types/noir_artifact.ts b/compiler/wasm/src/types/noir_artifact.ts new file mode 100644 index 00000000000..715877e335f --- /dev/null +++ b/compiler/wasm/src/types/noir_artifact.ts @@ -0,0 +1,199 @@ +import { Abi, AbiType } from '@noir-lang/noirc_abi'; + +/** + * A named type. + */ +export interface ABIVariable { + /** + * The name of the variable. + */ + name: string; + /** + * The type of the variable. + */ + type: AbiType; +} + +/** + * A contract event. + */ +export interface EventAbi { + /** + * The event name. + */ + name: string; + /** + * Fully qualified name of the event. + */ + path: string; + /** + * The fields of the event. + */ + fields: ABIVariable[]; +} + +/** The Noir function types. */ +export type NoirFunctionType = 'Open' | 'Secret' | 'Unconstrained'; + +/** + * The compilation result of an Noir function. + */ +export interface NoirFunctionEntry { + /** The name of the function. */ + name: string; + /** The type of the function. */ + function_type: NoirFunctionType; + /** Whether the function is internal. */ + is_internal: boolean; + /** The ABI of the function. */ + abi: Abi; + /** The bytecode of the function in base64. */ + bytecode: string; + /** The debug information, compressed and base64 encoded. */ + debug_symbols: string; +} + +/** + * The compilation result of an Noir contract. + */ +export interface ContractArtifact { + /** The name of the contract. */ + name: string; + /** Version of noir used for the build. */ + noir_version: string; + /** The functions of the contract. 
*/ + functions: NoirFunctionEntry[]; + /** The events of the contract */ + events: EventAbi[]; + /** The map of file ID to the source code and path of the file. */ + file_map: DebugFileMap; +} + +/** + * The compilation result of an Noir contract. + */ +export interface ProgramArtifact { + /** The hash of the circuit. */ + hash?: number; + /** * The ABI of the function. */ + abi: Abi; + /** The bytecode of the circuit in base64. */ + bytecode: string; + /** The debug information, compressed and base64 encoded. */ + debug_symbols: string; + /** The map of file ID to the source code and path of the file. */ + file_map: DebugFileMap; +} + +/** + * A file ID. It's assigned during compilation. + */ +export type FileId = number; + +/** + * A pointer to a specific section of the source code. + */ +export interface SourceCodeLocation { + /** + * The section of the source code. + */ + span: { + /** + * The byte where the section starts. + */ + start: number; + /** + * The byte where the section ends. + */ + end: number; + }; + /** + * The source code file pointed to. + */ + file: FileId; +} + +/** + * The location of an opcode in the bytecode. + * It's a string of the form `{acirIndex}` or `{acirIndex}:{brilligIndex}`. + */ +export type OpcodeLocation = string; + +/** + * The debug information for a given function. + */ +export interface DebugInfo { + /** + * A map of the opcode location to the source code location. + */ + locations: Record; +} + +/** + * Maps a file ID to its metadata for debugging purposes. + */ +export type DebugFileMap = Record< + FileId, + { + /** + * The source code of the file. + */ + source: string; + /** + * The path of the file. + */ + path: string; + } +>; + +/** Compilation warning */ +export type Warning = unknown; + +/** + * The compilation artifacts of a given contract. + */ +export interface ContractCompilationArtifacts { + /** + * The compiled contract. + */ + contract: ContractArtifact; + + /** Compilation warnings. 
*/ + warnings: Warning[]; +} + +/** + * The compilation artifacts of a given program. + */ +export interface ProgramCompilationArtifacts { + /** + * not part of the compilation output, injected later + */ + name: string; + /** + * The compiled contract. + */ + program: ProgramArtifact; + + /** Compilation warnings. */ + warnings: Warning[]; +} + +/** + * output of Noir Wasm compilation, can be for a contract or lib/binary + */ +export type CompilationResult = ContractCompilationArtifacts | ProgramCompilationArtifacts; + +/** + * Check if it has Contract unique property + */ +export function isContractCompilationArtifacts(artifact: CompilationResult): artifact is ContractCompilationArtifacts { + return (artifact as ContractCompilationArtifacts).contract !== undefined; +} + +/** + * Check if it has Contract unique property + */ +export function isProgramCompilationArtifacts(artifact: CompilationResult): artifact is ProgramCompilationArtifacts { + return (artifact as ProgramCompilationArtifacts).program !== undefined; +} diff --git a/compiler/wasm/src/types/noir_package_config.ts b/compiler/wasm/src/types/noir_package_config.ts new file mode 100644 index 00000000000..5f07c380cf3 --- /dev/null +++ b/compiler/wasm/src/types/noir_package_config.ts @@ -0,0 +1,53 @@ +type NoirGitDependencySchema = { + git: string; + tag: string; + directory?: string; +}; + +type NoirLocalDependencySchema = { + path: string; +}; + +type NoirPackageType = 'lib' | 'contract' | 'bin'; +type NoirPackageConfigSchema = { + package: { + name: string; + type: NoirPackageType; + entry?: string; + description?: string; + authors?: string[]; + compiler_version?: string; + backend?: string; + license?: string; + }; + dependencies: Record; +}; + +/** + * Noir package configuration. + */ +export type PackageConfig = NoirPackageConfigSchema; + +/** + * A remote package dependency. + */ +export type GitDependencyConfig = NoirGitDependencySchema; + +/** + * A local package dependency. 
+ */ +export type LocalDependencyConfig = NoirLocalDependencySchema; + +/** + * A package dependency. + */ +export type DependencyConfig = GitDependencyConfig | LocalDependencyConfig; + +/** + * Checks that an object is a package configuration. + * @param config - Config to check + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +export function parseNoirPackageConfig(config: any): PackageConfig { + return config; +} diff --git a/compiler/wasm/src/utils.ts b/compiler/wasm/src/utils.ts new file mode 100644 index 00000000000..513f7c51617 --- /dev/null +++ b/compiler/wasm/src/utils.ts @@ -0,0 +1,53 @@ +import { sep } from 'path'; + +/** Structured log data to include with the message. */ +export type LogData = Record; + +/** A callable logger instance. */ +export type LogFn = (msg: string, data?: LogData) => void; + +export function fileURLToPath(uri: string): string { + if (typeof uri !== 'string' || uri.length <= 7 || uri.substring(0, 7) !== 'file://') { + throw new TypeError('must pass in a file:// URI to convert to a file path'); + } + + const rest = decodeURI(uri.substring(7)); + const firstSlash = rest.indexOf('/'); + let host = rest.substring(0, firstSlash); + let path = rest.substring(firstSlash + 1); + + // 2. Scheme Definition + // As a special case, can be the string "localhost" or the empty + // string; this is interpreted as "the machine from which the URL is + // being interpreted". + if (host === 'localhost') { + host = ''; + } + + if (host) { + host = sep + sep + host; + } + + // 3.2 Drives, drive letters, mount points, file system root + // Drive letters are mapped into the top of a file URI in various ways, + // depending on the implementation; some applications substitute + // vertical bar ("|") for the colon after the drive letter, yielding + // "file:///c|/tmp/test.txt". In some cases, the colon is left + // unchanged, as in "file:///c:/tmp/test.txt". 
In other cases, the + // colon is simply omitted, as in "file:///c/tmp/test.txt". + path = path.replace(/^(.+)\|/, '$1:'); + + // for Windows, we need to invert the path separators from what a URI uses + if (sep === '\\') { + path = path.replace(/\//g, '\\'); + } + + if (/^.+:/.test(path)) { + // has Windows drive at beginning of path + } else { + // unix path… + path = sep + path; + } + + return host + path; +} diff --git a/compiler/wasm/test/browser/index.test.ts b/compiler/wasm/test/browser/index.test.ts deleted file mode 100644 index 346c20c834c..00000000000 --- a/compiler/wasm/test/browser/index.test.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { expect } from '@esm-bundle/chai'; -import initNoirWasm, { PathToFileSourceMap, compile } from '@noir-lang/noir_wasm'; -import { - depsScriptExpectedArtifact, - depsScriptSourcePath, - libASourcePath, - libBSourcePath, - simpleScriptExpectedArtifact, - simpleScriptSourcePath, -} from '../shared'; - -beforeEach(async () => { - await initNoirWasm(); -}); - -async function getFileContent(path: string): Promise { - const url = new URL(path, import.meta.url); - const response = await fetch(url); - return await response.text(); -} - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const compiledData = await getFileContent(path); - return JSON.parse(compiledData); -} - -describe('noir wasm', () => { - describe('can compile script without dependencies', () => { - it('matching nargos compilation', async () => { - const sourceMap = new PathToFileSourceMap(); - sourceMap.add_source_code('main.nr', await getFileContent(simpleScriptSourcePath)); - - const wasmCircuit = await compile('main.nr', undefined, undefined, sourceMap); - const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - // We don't expect the hashes to match due to how 
`noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(20e3); // 20 seconds - }); - - describe('can compile script with dependencies', () => { - it('matching nargos compilation', async () => { - const [scriptSource, libASource, libBSource] = await Promise.all([ - getFileContent(depsScriptSourcePath), - getFileContent(libASourcePath), - getFileContent(libBSourcePath), - ]); - - const sourceMap = new PathToFileSourceMap(); - sourceMap.add_source_code('script/main.nr', scriptSource); - sourceMap.add_source_code('lib_a/lib.nr', libASource); - sourceMap.add_source_code('lib_b/lib.nr', libBSource); - - const wasmCircuit = await compile( - 'script/main.nr', - false, - { - root_dependencies: ['lib_a'], - library_dependencies: { - lib_a: ['lib_b'], - }, - }, - sourceMap, - ); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(20e3); // 20 seconds - }); -}); diff --git a/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts b/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts new file mode 100644 index 00000000000..0d1e22e288f --- /dev/null +++ b/compiler/wasm/test/compiler/browser/compile_with_deps.test.ts @@ -0,0 +1,43 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { ContractArtifact } from 
'../../../src/types/noir_artifact'; +import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; + +const paths = getPaths('.'); + +async function getFile(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return response; +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFile(path); + const compiledData = await response.text(); + return JSON.parse(compiledData); +} + +describe('noir-compiler/browser', () => { + shouldCompileIdentically( + async () => { + const { contractExpectedArtifact } = paths; + const fm = createFileManager('/'); + const files = Object.values(paths).filter((fileOrDir) => /^\.?\/.*\..*$/.test(fileOrDir)); + for (const path of files) { + console.log(path); + await fm.writeFile(path, (await getFile(path)).body as ReadableStream); + } + const nargoArtifact = (await getPrecompiledSource(contractExpectedArtifact)) as ContractArtifact; + const noirWasmArtifact = await compile(fm, '/fixtures/noir-contract'); + + return { nargoArtifact, noirWasmArtifact }; + }, + expect, + 60 * 20e3, + ); +}); diff --git a/compiler/wasm/test/compiler/node/compile_with_deps.test.ts b/compiler/wasm/test/compiler/node/compile_with_deps.test.ts new file mode 100644 index 00000000000..2a402dc9d02 --- /dev/null +++ b/compiler/wasm/test/compiler/node/compile_with_deps.test.ts @@ -0,0 +1,20 @@ +import { join, resolve } from 'path'; +import { getPaths } from '../../shared'; + +import { expect } from 'chai'; +import { compile, createFileManager } from '@noir-lang/noir_wasm'; +import { readFile } from 'fs/promises'; +import { ContractArtifact } from '../../../src/types/noir_artifact'; +import { shouldCompileIdentically } from '../shared/compile_with_deps.test'; + +const basePath = 
resolve(join(__dirname, '../../')); +const { contractProjectPath, contractExpectedArtifact } = getPaths(basePath); + +describe('noir-compiler/node', () => { + shouldCompileIdentically(async () => { + const fm = createFileManager(contractProjectPath); + const nargoArtifact = JSON.parse((await readFile(contractExpectedArtifact)).toString()) as ContractArtifact; + const noirWasmArtifact = await compile(fm); + return { nargoArtifact, noirWasmArtifact }; + }, expect); +}); diff --git a/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts b/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts new file mode 100644 index 00000000000..0960cba0665 --- /dev/null +++ b/compiler/wasm/test/compiler/shared/compile_with_deps.test.ts @@ -0,0 +1,80 @@ +import { CompilationResult, inflateDebugSymbols } from '@noir-lang/noir_wasm'; +import { type expect as Expect } from 'chai'; +import { + ContractArtifact, + ContractCompilationArtifacts, + DebugFileMap, + DebugInfo, + NoirFunctionEntry, +} from '../../../src/types/noir_artifact'; + +export function shouldCompileIdentically( + compileFn: () => Promise<{ nargoArtifact: ContractArtifact; noirWasmArtifact: CompilationResult }>, + expect: typeof Expect, + timeout = 5000, +) { + it('both nargo and noir_wasm should compile identically', async () => { + // Compile! 
+ const { nargoArtifact, noirWasmArtifact } = await compileFn(); + + // Prepare nargo artifact + const [nargoDebugInfos, nargoFileMap] = deleteDebugMetadata(nargoArtifact); + normalizeVersion(nargoArtifact); + + // Prepare noir-wasm artifact + const noirWasmContract = (noirWasmArtifact as ContractCompilationArtifacts).contract; + expect(noirWasmContract).not.to.be.undefined; + const [noirWasmDebugInfos, norWasmFileMap] = deleteDebugMetadata(noirWasmContract); + normalizeVersion(noirWasmContract); + + // We first compare both contracts without considering debug info + expect(nargoArtifact).to.deep.eq(noirWasmContract); + + // Compare the file maps, ignoring keys, since those depend in the order in which files are visited, + // which may change depending on the file manager implementation. Also ignores paths, since the base + // path is reported differently between nargo and noir-wasm. + expect(getSources(nargoFileMap)).to.have.members(getSources(norWasmFileMap)); + + // Compare the debug symbol information, ignoring the actual ids used for file identifiers. + // Debug symbol info looks like the following, what we need is to ignore the 'file' identifiers + // {"locations":{"0":[{"span":{"start":141,"end":156},"file":39},{"span":{"start":38,"end":76},"file":38},{"span":{"start":824,"end":862},"file":23}]}} + expect(nargoDebugInfos).to.deep.eq(noirWasmDebugInfos); + }).timeout(timeout); +} + +/** Remove commit identifier from version, which may not match depending on cached nargo and noir-wasm */ +function normalizeVersion(contract: ContractArtifact) { + contract.noir_version = contract.noir_version.replace(/\+.+$/, ''); +} + +/** Extracts the debug symbols from all functions, decodes them, removes their file identifiers, and deletes them from the artifact. 
*/ +function extractDebugInfos(fns: NoirFunctionEntry[]) { + return fns.map((fn) => { + const debugSymbols = inflateDebugSymbols(fn.debug_symbols); + delete (fn as Partial).debug_symbols; + clearFileIdentifiers(debugSymbols); + return debugSymbols; + }); +} + +/** Deletes all debug info from a contract and returns it. */ +function deleteDebugMetadata(contract: ContractArtifact) { + contract.functions.sort((a, b) => a.name.localeCompare(b.name)); + const fileMap = contract.file_map; + delete (contract as Partial).file_map; + return [extractDebugInfos(contract.functions), fileMap]; +} + +/** Clears file identifiers from a set of debug symbols. */ +function clearFileIdentifiers(debugSymbols: DebugInfo) { + for (const loc of Object.values(debugSymbols.locations)) { + for (const span of loc) { + span.file = 0; + } + } +} + +/** Returns list of sources from file map, dropping paths along the way, since they don't match depending on the file manager. */ +function getSources(fileMap: DebugFileMap) { + return Object.values(fileMap).map((file) => file.source); +} diff --git a/compiler/wasm/test/dependencies/dependency-manager.test.ts b/compiler/wasm/test/dependencies/dependency-manager.test.ts new file mode 100644 index 00000000000..4e2fdbae515 --- /dev/null +++ b/compiler/wasm/test/dependencies/dependency-manager.test.ts @@ -0,0 +1,108 @@ +import { DependencyConfig } from '../../src/types/noir_package_config'; +import { Package } from '../../src/noir/package'; +import { DependencyManager } from '../../src/noir/dependencies/dependency-manager'; +import { Dependency, DependencyResolver } from '../../src/noir/dependencies/dependency-resolver'; + +import { expect } from 'chai'; + +describe('DependencyManager', () => { + let manager: DependencyManager; + + beforeEach(() => { + manager = new DependencyManager( + [new TestDependencyResolver()], + new Package('/test_contract', '/test_contract/src', { + dependencies: { + lib1: { + path: '/lib1', + }, + lib2: { + path: '/lib2', + }, 
+ lib3: { + path: '/lib3', + }, + }, + package: { + name: 'test_contract', + type: 'contract', + }, + }), + ); + }); + + it('successfully resolves dependencies', async () => { + await expect(manager.resolveDependencies()).to.eventually.be.undefined; + }); + + it('resolves all libraries', async () => { + await manager.resolveDependencies(); + expect(manager.getPackageNames()).to.eql(['lib1', 'lib2', 'lib3']); + }); + + it('resolves root dependencies', async () => { + await manager.resolveDependencies(); + expect(manager.getEntrypointDependencies()).to.eql(['lib1', 'lib2', 'lib3']); + }); + + it('resolves library dependencies', async () => { + await manager.resolveDependencies(); + expect(manager.getLibraryDependencies()).to.eql({ + lib2: ['lib3'], + }); + }); +}); + +class TestDependencyResolver implements DependencyResolver { + // eslint-disable-next-line require-await + public async resolveDependency(pkg: Package, dep: DependencyConfig): Promise { + if (!('path' in dep)) { + return null; + } + + switch (dep.path) { + case '/lib1': + return { + version: '', + package: new Package('/lib1', '/lib1/src', { + dependencies: {}, + package: { + name: 'lib1', + type: 'lib', + }, + }), + }; + + case '/lib2': + return { + version: '', + package: new Package('/lib2', '/lib2/src', { + dependencies: { + lib3: { + path: '../lib3', + }, + }, + package: { + name: 'lib2', + type: 'lib', + }, + }), + }; + + case '/lib3': + return { + version: '', + package: new Package('/lib3', '/lib3/src', { + dependencies: {}, + package: { + name: 'lib3', + type: 'lib', + }, + }), + }; + + default: + throw new Error(); + } + } +} diff --git a/compiler/wasm/test/dependencies/github-dependency-resolver.test.ts b/compiler/wasm/test/dependencies/github-dependency-resolver.test.ts new file mode 100644 index 00000000000..e7fae8afe8e --- /dev/null +++ b/compiler/wasm/test/dependencies/github-dependency-resolver.test.ts @@ -0,0 +1,149 @@ +import { Volume, createFsFromVolume } from 'memfs'; +import { 
dirname, join, resolve } from 'path'; + +import { FileManager } from '../../src/noir/file-manager/file-manager'; +import { createMemFSFileManager } from '../../src/noir/file-manager/memfs-file-manager'; +import { readdirRecursive } from '../../src/noir/file-manager/nodejs-file-manager'; + +import { Package } from '../../src/noir/package'; +import { DependencyResolver } from '../../src/noir/dependencies/dependency-resolver'; +import { + GithubDependencyResolver, + resolveGithubCodeArchive, + safeFilename, +} from '../../src/noir/dependencies/github-dependency-resolver'; +import { GitDependencyConfig } from '../../src/types/noir_package_config'; +import Sinon, { SinonStub } from 'sinon'; +import chai, { expect } from 'chai'; +import forEach from 'mocha-each'; +import chaiAsPromised from 'chai-as-promised'; +import AdmZip from 'adm-zip'; + +chai.use(chaiAsPromised); + +const fixtures = resolve(join(__dirname, '../fixtures')); + +describe('GithubDependencyResolver', () => { + let resolver: DependencyResolver; + let fm: FileManager; + let pkg: Package; + let libDependency: GitDependencyConfig; + let fetchStub: SinonStub | undefined; + + beforeEach(() => { + fetchStub = Sinon.stub(globalThis, 'fetch'); + fm = createMemFSFileManager(createFsFromVolume(new Volume()), '/'); + + libDependency = { + git: 'https://github.com/example/repo', + tag: 'v1.0.0', + }; + + pkg = new Package('/test_contract', '/test_contract/src', { + dependencies: { + // eslint-disable-next-line camelcase + lib_c: libDependency, + }, + package: { + name: 'test_contract', + type: 'contract', + }, + }); + + resolver = new GithubDependencyResolver(fm); + + // cut off outside access + fetchStub.onCall(0).throws(new Error()); + }); + + afterEach(() => { + fetchStub?.restore(); + }); + + it("returns null if it can't resolve a dependency", async () => { + const dep = await resolver.resolveDependency(pkg, { + path: '/lib-c', + }); + + expect(dep).to.be.null; + }); + + it('resolves Github dependency', async () 
=> { + const zip = new AdmZip(); + const testLibPath = join(fixtures, 'deps', 'lib-c'); + for (const filePath of await readdirRecursive(testLibPath)) { + zip.addLocalFile(filePath, dirname(filePath.replace(testLibPath, 'lib-c'))); + } + + fetchStub?.onCall(0).returns(new Response(zip.toBuffer(), { status: 200 })); + + const lib = await resolver.resolveDependency(pkg, libDependency); + expect(lib).not.to.be.undefined; + expect(lib!.version).to.eq(libDependency.tag); + expect(fm.hasFileSync(lib!.package.getEntryPointPath())).to.eq(true); + }); + + forEach([ + [ + 'https://github.com/example/lib.nr/archive/v1.0.0.zip', + 'zip', + { + git: 'https://github.com/example/lib.nr', + tag: 'v1.0.0', + }, + ], + [ + 'https://github.com/example/lib.nr/archive/v1.0.0.tar.gz', + 'tar', + { + git: 'https://github.com/example/lib.nr', + tag: 'v1.0.0', + }, + ], + [ + 'https://github.com/example/lib.nr/archive/HEAD.zip', + 'zip', + { + git: 'https://github.com/example/lib.nr', + tag: 'HEAD', + }, + ], + [ + 'https://github.com/example/lib.nr/archive/HEAD.tar.gz', + 'tar', + { + git: 'https://github.com/example/lib.nr', + tag: 'HEAD', + }, + ], + ]).it( + 'resolves to the correct code archive URL %s', + async (href: string, format: 'zip' | 'tar', dep: GitDependencyConfig) => { + const archiveUrl = resolveGithubCodeArchive(dep, format); + expect(archiveUrl.href).to.eq(href); + }, + ); + + forEach([ + { git: 'https://github.com/', tag: 'v1' }, + { git: 'https://github.com/foo', tag: 'v1' }, + { git: 'https://example.com', tag: 'v1' }, + ]).it('throws if the Github URL is invalid %j', (dep) => { + expect(() => resolveGithubCodeArchive(dep, 'zip')).to.throw(); + }); + + forEach([ + ['main', 'main'], + ['v1.0.0', 'v1.0.0'], + ['../../../etc/passwd', '.._.._.._etc_passwd'], + ['/etc/passwd', 'etc_passwd'], + ['/SomeOrg/some-repo@v1.0.0', 'SomeOrg_some-repo@v1.0.0'], + ['SomeOrg/some-repo@v1.0.0', 'SomeOrg_some-repo@v1.0.0'], + ]).it('generates safe file names from %s', (value, expected) => 
{ + expect(safeFilename(value)).to.eq(expected); + }); + + forEach(['']).it('rejects invalid values', (value) => { + expect(() => safeFilename(value)).to.throw(); + }); +}); diff --git a/compiler/wasm/test/dependencies/local-dependency-resolver.test.ts b/compiler/wasm/test/dependencies/local-dependency-resolver.test.ts new file mode 100644 index 00000000000..f44f618a7cb --- /dev/null +++ b/compiler/wasm/test/dependencies/local-dependency-resolver.test.ts @@ -0,0 +1,52 @@ +import { createFsFromVolume, Volume } from 'memfs'; +import { readFile } from 'fs/promises'; + +import { FileManager } from '../../src/noir/file-manager/file-manager'; +import { createMemFSFileManager } from '../../src/noir/file-manager/memfs-file-manager'; +import { Package } from '../../src/noir/package'; +import { DependencyResolver } from '../../src/noir/dependencies/dependency-resolver'; +import { LocalDependencyResolver } from '../../src/noir/dependencies/local-dependency-resolver'; +import { expect } from 'chai'; +import forEach from 'mocha-each'; +import { join } from 'path'; + +describe('DependencyResolver', () => { + let resolver: DependencyResolver; + let fm: FileManager; + let pkg: Package; + + beforeEach(async () => { + const fixtures = join(__dirname, '../fixtures'); + const memFS = createFsFromVolume(new Volume()); + memFS.mkdirSync('/noir-contract/src', { recursive: true }); + memFS.mkdirSync('/lib-c/src', { recursive: true }); + memFS.writeFileSync('/noir-contract/Nargo.toml', await readFile(join(fixtures, 'noir-contract/Nargo.toml'))); + memFS.writeFileSync('/noir-contract/src/main.nr', await readFile(join(fixtures, 'noir-contract/src/main.nr'))); + memFS.writeFileSync('/lib-c/Nargo.toml', await readFile(join(fixtures, 'deps/lib-c/Nargo.toml'))); + memFS.writeFileSync('/lib-c/src/lib.nr', await readFile(join(fixtures, 'deps/lib-c/src/lib.nr'))); + + fm = createMemFSFileManager(memFS, '/'); + + pkg = await Package.open('/noir-contract', fm); + resolver = new 
LocalDependencyResolver(fm); + }); + + it("returns null if it can't resolve a dependency", async () => { + const dep = await resolver.resolveDependency(pkg, { + git: 'git@some-git-host', + directory: '/', + tag: 'v1.0.0', + }); + + expect(dep).to.be.null; + }); + + forEach(['../noir-contract', '/noir-contract']).it('resolves a known dependency %s', async (path) => { + const lib = await resolver.resolveDependency(pkg, { + path, + }); + expect(lib).not.to.be.undefined; + expect(lib!.version).to.be.undefined; + expect(fm.hasFileSync(lib!.package.getEntryPointPath())).to.eq(true); + }); +}); diff --git a/compiler/wasm/test/file-manager/file-manager.test.ts b/compiler/wasm/test/file-manager/file-manager.test.ts new file mode 100644 index 00000000000..8a80854109a --- /dev/null +++ b/compiler/wasm/test/file-manager/file-manager.test.ts @@ -0,0 +1,96 @@ +import { Volume, createFsFromVolume } from 'memfs'; +import { existsSync, mkdtempSync, rmSync } from 'fs'; +import * as fs from 'fs/promises'; +import { tmpdir } from 'os'; +import { join } from 'path'; + +import { FileManager, FileSystem } from '../../src/noir/file-manager/file-manager'; +import { createMemFSFileManager } from '../../src/noir/file-manager/memfs-file-manager'; + +import { expect } from 'chai'; +import forEach from 'mocha-each'; + +const memFS = (): { fm: FileManager; teardown: () => void } => { + const fm = createMemFSFileManager(createFsFromVolume(new Volume()), '/'); + return { + fm, + // no-op, it's all in memory + teardown: () => {}, + }; +}; + +const nodeFS = (): { fm: FileManager; teardown: () => void } => { + const fileSystem: FileSystem = { + existsSync: existsSync, + mkdir: async (dir: string, opts?: { recursive: boolean }) => { + await fs.mkdir(dir, opts); + }, + writeFile: fs.writeFile, + readFile: fs.readFile, + rename: fs.rename, + readdir: fs.readdir, + }; + + const dir = mkdtempSync(join(tmpdir(), 'noir-compiler-test')); + const fm = new FileManager(fileSystem, dir); + + return { + fm, + 
teardown: () => { + rmSync(dir, { + recursive: true, + }); + }, + }; +}; + +/** + * Declare the default test suite for a file manager + * @param setup - Function to setup a file manager + * @param teardown - Optional function to call at the end of the test + */ +forEach([ + ['memFs', memFS], + ['nodeFS', nodeFS], +]).describe('FileManager: %s', (name, fs) => { + let fm: FileManager; + let testFileContent: string; + let testFileBytes: Uint8Array; + let teardown: () => void; + + beforeEach(() => { + ({ fm, teardown } = fs()); + testFileContent = 'foo'; + testFileBytes = new TextEncoder().encode(testFileContent); + }); + + afterEach(() => { + return teardown?.(); + }); + + it(`saves files and correctly reads bytes back using ${name}`, async () => { + await fm.writeFile('test.txt', new Blob([testFileBytes]).stream()); + expect(fm.readFile('test.txt')).to.eventually.eq(testFileBytes); + }); + + it(`saves files and correctly reads UTF-8 string back using ${name}`, async () => { + await fm.writeFile('test.txt', new Blob([testFileBytes]).stream()); + expect(fm.readFile('test.txt', 'utf-8')).to.eventually.eq(testFileContent); + }); + + it(`correctly checks if file exists or not using ${name}`, async () => { + expect(fm.hasFileSync('test.txt')).to.eq(false); + await fm.writeFile('test.txt', new Blob([testFileBytes]).stream()); + expect(fm.hasFileSync('test.txt')).to.eq(true); + }); + + it(`moves files using ${name}`, async () => { + await fm.writeFile('test.txt.tmp', new Blob([testFileBytes]).stream()); + expect(fm.hasFileSync('test.txt.tmp')).to.eq(true); + + await fm.moveFile('test.txt.tmp', 'test.txt'); + + expect(fm.hasFileSync('test.txt.tmp')).to.eq(false); + expect(fm.hasFileSync('test.txt')).to.eq(true); + }); +}); diff --git a/compiler/wasm/fixtures/deps/lib-a/Nargo.toml b/compiler/wasm/test/fixtures/deps/lib-a/Nargo.toml similarity index 100% rename from compiler/wasm/fixtures/deps/lib-a/Nargo.toml rename to compiler/wasm/test/fixtures/deps/lib-a/Nargo.toml diff 
--git a/compiler/wasm/fixtures/deps/lib-a/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-a/src/lib.nr similarity index 100% rename from compiler/wasm/fixtures/deps/lib-a/src/lib.nr rename to compiler/wasm/test/fixtures/deps/lib-a/src/lib.nr diff --git a/compiler/wasm/fixtures/deps/lib-b/Nargo.toml b/compiler/wasm/test/fixtures/deps/lib-b/Nargo.toml similarity index 100% rename from compiler/wasm/fixtures/deps/lib-b/Nargo.toml rename to compiler/wasm/test/fixtures/deps/lib-b/Nargo.toml diff --git a/compiler/wasm/fixtures/deps/lib-b/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-b/src/lib.nr similarity index 100% rename from compiler/wasm/fixtures/deps/lib-b/src/lib.nr rename to compiler/wasm/test/fixtures/deps/lib-b/src/lib.nr diff --git a/compiler/wasm/test/fixtures/deps/lib-c/Nargo.toml b/compiler/wasm/test/fixtures/deps/lib-c/Nargo.toml new file mode 100644 index 00000000000..dafdb62e045 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "lib_c" +authors = [""] +compiler_version = ">=0.18.0" +type = "lib" + +[dependencies] \ No newline at end of file diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr new file mode 100644 index 00000000000..5c0b5a621e0 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/lib.nr @@ -0,0 +1 @@ +mod module; \ No newline at end of file diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr new file mode 100644 index 00000000000..2746c97edf0 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/module.nr @@ -0,0 +1 @@ +mod foo; \ No newline at end of file diff --git a/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr b/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr new file mode 100644 index 00000000000..e0c82fb1960 --- /dev/null +++ b/compiler/wasm/test/fixtures/deps/lib-c/src/module/foo.nr @@ -0,0 +1,3 @@ +pub 
fn bar(param: Field) -> Field { + dep::std::hash::pedersen_hash([param]) +} diff --git a/compiler/wasm/test/fixtures/noir-contract/Nargo.toml b/compiler/wasm/test/fixtures/noir-contract/Nargo.toml new file mode 100644 index 00000000000..2e64f3ebc9c --- /dev/null +++ b/compiler/wasm/test/fixtures/noir-contract/Nargo.toml @@ -0,0 +1,8 @@ +[package] +name = "test" +authors = [""] +compiler_version = ">=0.18.0" +type = "contract" + +[dependencies] +test = { path = "../deps/lib-c" } diff --git a/compiler/wasm/test/fixtures/noir-contract/src/main.nr b/compiler/wasm/test/fixtures/noir-contract/src/main.nr new file mode 100644 index 00000000000..b980af369cf --- /dev/null +++ b/compiler/wasm/test/fixtures/noir-contract/src/main.nr @@ -0,0 +1,12 @@ +contract TestContract { + use dep::test::module::foo; + + fn constructor(param: Field, pub_param: pub Field) -> pub [Field; 2] { + [foo::bar(param), param + pub_param] + } + + open fn openFunction() -> pub Field { + 42 + } + +} diff --git a/compiler/wasm/fixtures/simple/noir-script/Nargo.toml b/compiler/wasm/test/fixtures/simple/Nargo.toml similarity index 100% rename from compiler/wasm/fixtures/simple/noir-script/Nargo.toml rename to compiler/wasm/test/fixtures/simple/Nargo.toml diff --git a/compiler/wasm/fixtures/simple/noir-script/src/main.nr b/compiler/wasm/test/fixtures/simple/src/main.nr similarity index 100% rename from compiler/wasm/fixtures/simple/noir-script/src/main.nr rename to compiler/wasm/test/fixtures/simple/src/main.nr diff --git a/compiler/wasm/fixtures/deps/noir-script/Nargo.toml b/compiler/wasm/test/fixtures/with-deps/Nargo.toml similarity index 70% rename from compiler/wasm/fixtures/deps/noir-script/Nargo.toml rename to compiler/wasm/test/fixtures/with-deps/Nargo.toml index 7c8182a02ae..b7543525059 100644 --- a/compiler/wasm/fixtures/deps/noir-script/Nargo.toml +++ b/compiler/wasm/test/fixtures/with-deps/Nargo.toml @@ -4,4 +4,4 @@ type="bin" authors = [""] [dependencies] -lib_a = { path="../lib-a" } +lib_a = 
{ path="../deps/lib-a" } diff --git a/compiler/wasm/fixtures/deps/noir-script/src/main.nr b/compiler/wasm/test/fixtures/with-deps/src/main.nr similarity index 100% rename from compiler/wasm/fixtures/deps/noir-script/src/main.nr rename to compiler/wasm/test/fixtures/with-deps/src/main.nr diff --git a/compiler/wasm/test/node/index.test.ts b/compiler/wasm/test/node/index.test.ts deleted file mode 100644 index 5cf9e3be2df..00000000000 --- a/compiler/wasm/test/node/index.test.ts +++ /dev/null @@ -1,75 +0,0 @@ -import { expect } from 'chai'; -import { - depsScriptSourcePath, - depsScriptExpectedArtifact, - libASourcePath, - libBSourcePath, - simpleScriptSourcePath, - simpleScriptExpectedArtifact, -} from '../shared'; -import { readFileSync } from 'node:fs'; -import { join, resolve } from 'node:path'; -import { compile, PathToFileSourceMap } from '@noir-lang/noir_wasm'; - -// eslint-disable-next-line @typescript-eslint/no-explicit-any -async function getPrecompiledSource(path: string): Promise { - const compiledData = readFileSync(resolve(__dirname, path)).toString(); - return JSON.parse(compiledData); -} - -describe('noir wasm compilation', () => { - describe('can compile simple scripts', () => { - it('matching nargos compilation', async () => { - const sourceMap = new PathToFileSourceMap(); - sourceMap.add_source_code( - join(__dirname, simpleScriptSourcePath), - readFileSync(join(__dirname, simpleScriptSourcePath), 'utf-8'), - ); - const wasmCircuit = await compile(join(__dirname, simpleScriptSourcePath), undefined, undefined, sourceMap); - const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - 
expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(10e3); - }); - - describe('can compile scripts with dependencies', () => { - const sourceMap: PathToFileSourceMap = new PathToFileSourceMap(); - beforeEach(() => { - sourceMap.add_source_code('script/main.nr', readFileSync(join(__dirname, depsScriptSourcePath), 'utf-8')); - sourceMap.add_source_code('lib_a/lib.nr', readFileSync(join(__dirname, libASourcePath), 'utf-8')); - sourceMap.add_source_code('lib_b/lib.nr', readFileSync(join(__dirname, libBSourcePath), 'utf-8')); - }); - - it('matching nargos compilation', async () => { - const wasmCircuit = await compile( - 'script/main.nr', - false, - { - root_dependencies: ['lib_a'], - library_dependencies: { - lib_a: ['lib_b'], - }, - }, - sourceMap, - ); - - const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); - - if (!('program' in wasmCircuit)) { - throw Error('Expected program to be present'); - } - - // We don't expect the hashes to match due to how `noir_wasm` handles dependencies - expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); - expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); - expect(wasmCircuit.program.backend).to.eq(cliCircuit.backend); - }).timeout(10e3); - }); -}); diff --git a/compiler/wasm/test/shared.ts b/compiler/wasm/test/shared.ts index 6fc370f7ac8..9181919ff39 100644 --- a/compiler/wasm/test/shared.ts +++ b/compiler/wasm/test/shared.ts @@ -1,8 +1,41 @@ -export const simpleScriptSourcePath = '../../fixtures/simple/noir-script/src/main.nr'; -export const simpleScriptExpectedArtifact = '../../fixtures/simple/noir-script/target/noir_wasm_testing.json'; +export function getPaths(basePath: string) { + const fixtures = `${basePath}/fixtures`; -export const depsScriptSourcePath = '../../fixtures/deps/noir-script/src/main.nr'; -export const depsScriptExpectedArtifact = '../../fixtures/deps/noir-script/target/noir_wasm_testing.json'; + const simpleScriptSourcePath = 
`${fixtures}/simple/src/main.nr`; + const simpleScriptExpectedArtifact = `${fixtures}/simple/target/noir_wasm_testing.json`; -export const libASourcePath = '../../fixtures/deps/lib-a/src/lib.nr'; -export const libBSourcePath = '../../fixtures/deps/lib-b/src/lib.nr'; + const depsScriptSourcePath = `${fixtures}/with-deps/src/main.nr`; + const depsScriptExpectedArtifact = `${fixtures}/with-deps/target/noir_wasm_testing.json`; + + const libASourcePath = `${fixtures}/deps/lib-a/src/lib.nr`; + const libBSourcePath = `${fixtures}/deps/lib-b/src/lib.nr`; + + const contractProjectPath = `${fixtures}/noir-contract`; + const contractSourcePath = `${contractProjectPath}/src/main.nr`; + const contractTOMLPath = `${contractProjectPath}/Nargo.toml`; + const contractExpectedArtifact = `${contractProjectPath}/target/test-TestContract.json`; + + const libCProjectPath = `${fixtures}/deps/lib-c`; + const libCSourcePath = `${libCProjectPath}/src/lib.nr`; + const libCModulePath = `${libCProjectPath}/src/module.nr`; + const libCModuleSourcePath = `${libCProjectPath}/src/module/foo.nr`; + const libCTOMLPath = `${libCProjectPath}/Nargo.toml`; + + return { + simpleScriptSourcePath, + simpleScriptExpectedArtifact, + depsScriptSourcePath, + depsScriptExpectedArtifact, + libASourcePath, + libBSourcePath, + contractProjectPath, + contractSourcePath, + contractTOMLPath, + contractExpectedArtifact, + libCProjectPath, + libCSourcePath, + libCModulePath, + libCModuleSourcePath, + libCTOMLPath, + }; +} diff --git a/compiler/wasm/test/wasm/browser/index.test.ts b/compiler/wasm/test/wasm/browser/index.test.ts new file mode 100644 index 00000000000..3122fa57945 --- /dev/null +++ b/compiler/wasm/test/wasm/browser/index.test.ts @@ -0,0 +1,159 @@ +/* eslint-disable @typescript-eslint/ban-ts-comment */ +import { getPaths } from '../../shared'; +import { expect } from '@esm-bundle/chai'; + +import init, { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/esm'; + +// @ts-ignore 
+await init(); + +const { + simpleScriptSourcePath, + simpleScriptExpectedArtifact, + depsScriptSourcePath, + depsScriptExpectedArtifact, + libASourcePath, + libBSourcePath, +} = getPaths('.'); + +async function getFileAsString(path: string) { + // @ts-ignore + const basePath = new URL('./../../', import.meta.url).toString().replace(/\/$/g, ''); + const url = `${basePath}${path.replace('.', '')}`; + const response = await fetch(url); + return response.text(); +} + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const response = await getFileAsString(path); + return JSON.parse(response); +} + +describe('noir wasm compilation', () => { + describe('can compile simple scripts', () => { + it('matching nargos compilation', async () => { + const sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code('script/main.nr', await getFileAsString(simpleScriptSourcePath)); + const wasmCircuit = compile('script/main.nr', undefined, undefined, sourceMap); + const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies', () => { + const sourceMap = new PathToFileSourceMap(); + beforeEach(async () => { + sourceMap.add_source_code('script/main.nr', await getFileAsString(depsScriptSourcePath)); + sourceMap.add_source_code('lib_a/lib.nr', await getFileAsString(libASourcePath)); + sourceMap.add_source_code('lib_b/lib.nr', await getFileAsString(libBSourcePath)); + }); + + it('matching nargos compilation', async () => { + const 
wasmCircuit = compile( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies -- context-api', () => { + let sourceMap: PathToFileSourceMap; + beforeEach(async () => { + sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code('script/main.nr', await getFileAsString(depsScriptSourcePath)); + sourceMap.add_source_code('lib_a/lib.nr', await getFileAsString(libASourcePath)); + sourceMap.add_source_code('lib_b/lib.nr', await getFileAsString(libBSourcePath)); + }); + + it('matching nargos compilation - context-api', async () => { + const compilerContext = new CompilerContext(sourceMap); + + // Process root crate + const root_crate_id = compilerContext.process_root_crate('script/main.nr'); + // Process dependencies + // + // This can be direct dependencies or transitive dependencies + // I have named these crate_id_1 and crate_id_2 instead of `lib_a_crate_id` and `lib_b_crate_id` + // because the names of crates in a dependency graph are not determined by the actual package. + // + // It is true that each package is given a name, but if I include a `lib_a` as a dependency + // in my library, I do not need to refer to it as `lib_a` in my dependency graph. 
+ // See https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml + // + // If you have looked at graphs before, then you can think of the dependency graph as a directed acyclic graph (DAG) + const crate_id_1 = compilerContext.process_dependency_crate('lib_a/lib.nr'); + const crate_id_2 = compilerContext.process_dependency_crate('lib_b/lib.nr'); + + // Root crate depends on `crate_id_1` and this edge is called `lib_a` + compilerContext.add_dependency_edge('lib_a', root_crate_id, crate_id_1); + // `crate_id_1` depends on `crate_id_2` and this edge is called `lib_b` + compilerContext.add_dependency_edge('lib_b', crate_id_1, crate_id_2); + + const program_width = 3; + const wasmCircuit = await compilerContext.compile_program(program_width); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + + it('matching nargos compilation - context-implementation-compile-api', async () => { + const wasmCircuit = await compile_( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + 
expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + }); +}); diff --git a/compiler/wasm/test/wasm/node/index.test.ts b/compiler/wasm/test/wasm/node/index.test.ts new file mode 100644 index 00000000000..c73ce7477e5 --- /dev/null +++ b/compiler/wasm/test/wasm/node/index.test.ts @@ -0,0 +1,150 @@ +import { getPaths } from '../../shared'; +import { readFileSync } from 'fs'; +import { join, resolve } from 'path'; +import { expect } from 'chai'; + +import { compile, PathToFileSourceMap, compile_, CompilerContext } from '../../../build/cjs'; + +const basePath = resolve(join(__dirname, '../../')); +const { + simpleScriptSourcePath, + simpleScriptExpectedArtifact, + depsScriptSourcePath, + depsScriptExpectedArtifact, + libASourcePath, + libBSourcePath, +} = getPaths(basePath); + +// eslint-disable-next-line @typescript-eslint/no-explicit-any +async function getPrecompiledSource(path: string): Promise { + const compiledData = readFileSync(resolve(__dirname, path)).toString(); + return JSON.parse(compiledData); +} + +describe('noir wasm compilation', () => { + describe('can compile simple scripts', () => { + it('matching nargos compilation', async () => { + const sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code(simpleScriptSourcePath, readFileSync(simpleScriptSourcePath, 'utf-8')); + const wasmCircuit = compile(simpleScriptSourcePath, undefined, undefined, sourceMap); + const cliCircuit = await getPrecompiledSource(simpleScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies', () => { + 
const sourceMap = new PathToFileSourceMap(); + beforeEach(() => { + sourceMap.add_source_code('script/main.nr', readFileSync(depsScriptSourcePath, 'utf-8')); + sourceMap.add_source_code('lib_a/lib.nr', readFileSync(libASourcePath, 'utf-8')); + sourceMap.add_source_code('lib_b/lib.nr', readFileSync(libBSourcePath, 'utf-8')); + }); + + it('matching nargos compilation', async () => { + const wasmCircuit = compile( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + }).timeout(60 * 20e3); + }); + + describe('can compile scripts with dependencies -- context-api', () => { + let sourceMap: PathToFileSourceMap; + beforeEach(() => { + sourceMap = new PathToFileSourceMap(); + sourceMap.add_source_code('script/main.nr', readFileSync(depsScriptSourcePath, 'utf-8')); + sourceMap.add_source_code('lib_a/lib.nr', readFileSync(libASourcePath, 'utf-8')); + sourceMap.add_source_code('lib_b/lib.nr', readFileSync(libBSourcePath, 'utf-8')); + }); + + it('matching nargos compilation - context-api', async () => { + const compilerContext = new CompilerContext(sourceMap); + + // Process root crate + const root_crate_id = compilerContext.process_root_crate('script/main.nr'); + // Process dependencies + // + // This can be direct dependencies or transitive dependencies + // I have named these crate_id_1 and crate_id_2 instead of `lib_a_crate_id` and `lib_b_crate_id` + // because the names of crates in a dependency graph are not determined by the actual 
package. + // + // It is true that each package is given a name, but if I include a `lib_a` as a dependency + // in my library, I do not need to refer to it as `lib_a` in my dependency graph. + // See https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#renaming-dependencies-in-cargotoml + // + // If you have looked at graphs before, then you can think of the dependency graph as a directed acyclic graph (DAG) + const crate_id_1 = compilerContext.process_dependency_crate('lib_a/lib.nr'); + const crate_id_2 = compilerContext.process_dependency_crate('lib_b/lib.nr'); + + // Root crate depends on `crate_id_1` and this edge is called `lib_a` + compilerContext.add_dependency_edge('lib_a', root_crate_id, crate_id_1); + // `crate_id_1` depends on `crate_id_2` and this edge is called `lib_b` + compilerContext.add_dependency_edge('lib_b', crate_id_1, crate_id_2); + + const program_width = 3; + const wasmCircuit = await compilerContext.compile_program(program_width); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies + expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + + it('matching nargos compilation - context-implementation-compile-api', async () => { + const wasmCircuit = await compile_( + 'script/main.nr', + false, + { + root_dependencies: ['lib_a'], + library_dependencies: { + lib_a: ['lib_b'], + }, + }, + sourceMap, + ); + + const cliCircuit = await getPrecompiledSource(depsScriptExpectedArtifact); + + if (!('program' in wasmCircuit)) { + throw Error('Expected program to be present'); + } + + // We don't expect the hashes to match due to how `noir_wasm` handles dependencies 
+ expect(wasmCircuit.program.noir_version).to.eq(cliCircuit.noir_version); + expect(wasmCircuit.program.abi).to.deep.eq(cliCircuit.abi); + expect(wasmCircuit.program.bytecode).to.eq(cliCircuit.bytecode); + }).timeout(60 * 20e3); + }); +}); diff --git a/compiler/wasm/tsconfig.esm.json b/compiler/wasm/tsconfig.esm.json new file mode 100644 index 00000000000..5826fee7c52 --- /dev/null +++ b/compiler/wasm/tsconfig.esm.json @@ -0,0 +1,6 @@ +{ + "extends": "./tsconfig.webpack.json", + "compilerOptions": { + "module": "ESNext", + }, +} \ No newline at end of file diff --git a/compiler/wasm/tsconfig.json b/compiler/wasm/tsconfig.json index eef2ad84833..6096b419d78 100644 --- a/compiler/wasm/tsconfig.json +++ b/compiler/wasm/tsconfig.json @@ -1,17 +1,20 @@ { "compilerOptions": { - "moduleResolution": "node", - "outDir": "lib", - "target": "ESNext", - "module": "ESNext", + "declaration": true, + "declarationDir": "dist/types", "strict": true, + "baseUrl": ".", "experimentalDecorators": true, - "esModuleInterop": true, "noImplicitAny": true, "removeComments": false, "preserveConstEnums": true, "sourceMap": true, - "resolveJsonModule": true, - "importHelpers": true + "importHelpers": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "target": "ESNext", + "module": "CommonJS", + "moduleResolution": "Node", + "allowJs": true, } } \ No newline at end of file diff --git a/compiler/wasm/tsconfig.webpack.json b/compiler/wasm/tsconfig.webpack.json new file mode 100644 index 00000000000..dd1c218a352 --- /dev/null +++ b/compiler/wasm/tsconfig.webpack.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "exclude": [ + "./test/**/*", + "node_modules" + ] +} \ No newline at end of file diff --git a/compiler/wasm/web-test-runner.config.mjs b/compiler/wasm/web-test-runner.config.mjs index 3fd65d56618..4d05c95f7d6 100644 --- a/compiler/wasm/web-test-runner.config.mjs +++ b/compiler/wasm/web-test-runner.config.mjs @@ -1,14 +1,14 @@ -import { defaultReporter } 
from "@web/test-runner"; -import { summaryReporter } from "@web/test-runner"; -import { fileURLToPath } from "url"; -import { esbuildPlugin } from "@web/dev-server-esbuild"; -import { playwrightLauncher } from "@web/test-runner-playwright"; +import { defaultReporter } from '@web/test-runner'; +import { summaryReporter } from '@web/test-runner'; +import { fileURLToPath } from 'url'; +import { esbuildPlugin } from '@web/dev-server-esbuild'; +import { playwrightLauncher } from '@web/test-runner-playwright'; const reporter = process.env.CI ? summaryReporter() : defaultReporter(); export default { browsers: [ - playwrightLauncher({ product: "chromium" }), + playwrightLauncher({ product: 'chromium' }), // playwrightLauncher({ product: "webkit" }), // playwrightLauncher({ product: "firefox" }), ], @@ -17,15 +17,8 @@ export default { ts: true, }), ], - files: ["test/browser/**/*.test.ts"], + files: ['./test/**/browser/*.test.ts'], nodeResolve: true, - testFramework: { - config: { - ui: "bdd", - timeout: 40000, - }, - }, - rootDir: fileURLToPath(new URL('./../../', import.meta.url)), + rootDir: fileURLToPath(new URL('./../../', import.meta.url)), reporters: [reporter], - }; diff --git a/compiler/wasm/webpack.config.ts b/compiler/wasm/webpack.config.ts new file mode 100644 index 00000000000..d5d70df2b8a --- /dev/null +++ b/compiler/wasm/webpack.config.ts @@ -0,0 +1,134 @@ +import { resolve, join } from 'path'; +import webpack from 'webpack'; +import 'webpack-dev-server'; +import WasmPackPlugin from '@wasm-tool/wasm-pack-plugin'; +import HtmlWebpackPlugin from 'html-webpack-plugin'; +import CopyWebpackPlugin from 'copy-webpack-plugin'; + +const config: webpack.Configuration = { + output: { + path: resolve(__dirname, 'dist'), + }, + mode: 'development', + devtool: 'source-map', + optimization: { + minimize: false, + }, + resolve: { + extensions: ['.cts', '.mts', '.ts', '.js', '.json', '.wasm'], + fallback: { + path: require.resolve('path-browserify'), + stream: 
require.resolve('readable-stream'), + fs: require.resolve('browserify-fs'), + buffer: require.resolve('buffer'), + }, + }, +}; + +const webConfig: webpack.Configuration = { + name: 'web', + entry: './src/index.mts', + ...config, + experiments: { asyncWebAssembly: true, outputModule: true }, + output: { + filename: 'main.mjs', + ...config.output, + path: resolve(__dirname, 'dist/web'), + library: { + type: 'module', + }, + }, + plugins: [ + new WasmPackPlugin({ + crateDirectory: resolve(__dirname), + outDir: resolve(__dirname, './build/esm'), + extraArgs: '--target web', + forceMode: process.env.WASM_OPT === 'true' ? 'production' : 'development', + }), + new HtmlWebpackPlugin({ + title: 'Noir Wasm ESM', + }), + new webpack.DefinePlugin({ + 'process.env.NODE_DEBUG': JSON.stringify(process.env.NODE_DEBUG), + }), + new webpack.ProvidePlugin({ + process: 'process/browser', + }), + new webpack.ProvidePlugin({ + Buffer: ['buffer', 'Buffer'], + }), + ], + module: { + rules: [ + { + test: /.m?ts$/, + loader: 'ts-loader', + options: { + configFile: 'tsconfig.esm.json', + }, + exclude: /node_modules/, + }, + { + test: /\.wasm$/, + type: 'asset/inline', + }, + ], + }, + devServer: { + static: join(__dirname, 'dist'), + }, + resolve: { + ...config.resolve, + alias: { + fs: 'memfs', + }, + }, +}; + +const nodeConfig: webpack.Configuration = { + name: 'node', + entry: './src/index.cts', + ...config, + output: { + ...config.output, + path: resolve(__dirname, 'dist/node'), + library: { + type: 'commonjs2', + }, + }, + target: 'node', + plugins: [ + new WasmPackPlugin({ + crateDirectory: resolve(__dirname), + outDir: resolve(__dirname, './build/cjs'), + extraArgs: '--target nodejs', + forceMode: process.env.WASM_OPT === 'true' ? 
'production' : 'development', + }), + new CopyWebpackPlugin({ + patterns: [ + { + from: resolve(__dirname, './build/cjs/index_bg.wasm'), + to: resolve(__dirname, 'dist/node/index_bg.wasm'), + }, + ], + }), + ], + module: { + rules: [ + { + test: /.c?ts$/, + loader: 'ts-loader', + options: { + configFile: 'tsconfig.webpack.json', + }, + exclude: /node_modules/, + }, + { + test: /\.wasm$/, + type: 'webassembly/async', + }, + ], + }, +}; + +export default [webConfig, nodeConfig]; diff --git a/cspell.json b/cspell.json index e02e68871bb..0547b956d72 100644 --- a/cspell.json +++ b/cspell.json @@ -1,35 +1,53 @@ { "version": "0.2", + "dictionaries": [ + "rust" + ], "words": [ "aarch", "acir", "acvm", "aeiou", "appender", + "Arbitrum", "arithmetization", "arity", "arkworks", "arraysort", + "barebones", "barretenberg", "bincode", "bindgen", "bitand", "blackbox", + "bridgekeeper", "brillig", + "bytecount", "cachix", "callsite", "callsites", + "callstack", + "callstacks", "canonicalize", "castable", + "catmcgee", + "Celo", "chumsky", - "clippy", "codegen", + "codegenned", "codegens", + "Codespaces", "codespan", "coeff", "combinators", "comptime", + "cpus", "cranelift", + "critesjosh", + "csat", + "curvegroup", + "databus", + "deflater", "deflatten", "deflattened", "deflattening", @@ -40,26 +58,31 @@ "desugared", "direnv", "eddsa", + "Elligator", "endianness", "envrc", "Flamegraph", "flate", "fmtstr", "foldl", + "foos", "forall", "foralls", "formatcp", + "frontends", "fxhash", "getrandom", "gloo", "grumpkin", "Guillaume", + "gzipped", "hasher", "hexdigit", "higher-kinded", "Hindley-Milner", "idents", "impls", + "indexmap", "injective", "Inlines", "interner", @@ -68,49 +91,68 @@ "jmpif", "jmpifs", "jmps", + "jsdoc", + "Jubjub", "keccak", + "keccakf", "krate", "lvalue", + "Maddiaa", + "mathbb", "merkle", "metas", + "minreq", "monomorphization", "monomorphize", "monomorphized", "monomorphizer", "monomorphizes", + "monomorphizing", + "montcurve", "nand", "nargo", + "neovim", 
"newtype", + "nightlies", "nixpkgs", "noirc", + "noirfmt", + "noirjs", "noirup", "nomicfoundation", + "noncanonical", "pedersen", "peekable", "plonkc", + "PLONKish", "pprof", "preprocess", "prettytable", - "println", "printstd", "pseudocode", + "pubkey", "quantile", + "repr", + "reqwest", + "rfind", "rustc", "rustup", "schnorr", "sdiv", "secp256k1", "secp256r1", - "serde", "signedness", + "signorecello", "smol", "splitn", "srem", "stdlib", - "struct", + "structs", "subexpression", "subshell", "subtyping", + "swcurve", + "tecurve", "tempdir", "tempfile", "termcolor", @@ -125,7 +167,17 @@ "unnormalized", "unoptimized", "urem", + "USERPROFILE", "vecmap", - "wasi" + "wasi", + "wasmer", + "Weierstraß", + "zshell", + "nouner", + "devcontainer" + ], + "ignorePaths": [ + "./**/node_modules/**", + "./**/target/**" ] } diff --git a/deny.toml b/deny.toml index d9ffd4d37f0..5edce08fb70 100644 --- a/deny.toml +++ b/deny.toml @@ -67,6 +67,7 @@ exceptions = [ # so we prefer to not have dependencies using it # https://tldrlegal.com/license/creative-commons-cc0-1.0-universal { allow = ["CC0-1.0"], name = "more-asserts" }, + { allow = ["CC0-1.0"], name = "jsonrpc" }, { allow = ["MPL-2.0"], name = "sized-chunks" }, { allow = ["MPL-2.0"], name = "webpki-roots" }, diff --git a/docs/.gitignore b/docs/.gitignore index e4abc8785c7..4f6eee8284e 100644 --- a/docs/.gitignore +++ b/docs/.gitignore @@ -22,3 +22,4 @@ yarn-debug.log* yarn-error.log* package-lock.json +versions.json diff --git a/docs/.yarnrc.yml b/docs/.yarnrc.yml index 3186f3f0795..f703d01801b 100644 --- a/docs/.yarnrc.yml +++ b/docs/.yarnrc.yml @@ -1 +1,4 @@ nodeLinker: node-modules +logFilters: + - code: YN0013 + level: discard diff --git a/docs/docs/explainers/explainer-oracle.md b/docs/docs/explainers/explainer-oracle.md new file mode 100644 index 00000000000..b84ca5dd986 --- /dev/null +++ b/docs/docs/explainers/explainer-oracle.md @@ -0,0 +1,57 @@ +--- +title: Oracles +description: This guide provides an in-depth 
understanding of how Oracles work in Noir programming. Learn how to use outside calculations in your programs, constrain oracles, and understand their uses and limitations. +keywords: + - Noir Programming + - Oracles + - JSON-RPC + - Foreign Call Handlers + - Constrained Functions + - Blockchain Programming +sidebar_position: 1 +--- + +If you've seen "The Matrix" you may recall "The Oracle" as Gloria Foster smoking cigarettes and baking cookies. While she appears to "know things", she is actually providing a calculation of a pre-determined future. Noir Oracles are similar, in a way. They don't calculate the future (yet), but they allow you to use outside calculations in your programs. + +![matrix oracle prediction](@site/static/img/memes/matrix_oracle.jpeg) + +A Noir program is usually self-contained. You can pass certain inputs to it, and it will generate a deterministic output for those inputs. But what if you wanted to defer some calculation to an outside process or source? + +Oracles are functions that provide this feature. + +## Use cases + +An example usage for Oracles is proving something on-chain. For example, proving that the ETH-USDC quote was below a certain target at a certain block time. Or even making more complex proofs like proving the ownership of an NFT as an anonymous login method. + +Another interesting use case is to defer expensive calculations to be made outside of the Noir program, and then constraining the result; similar to the use of [unconstrained functions](../noir/concepts//unconstrained.md). + +In short, anything that can be constrained in a Noir program but needs to be fetched from an external source is a great candidate to be used in oracles. + +## Constraining oracles + +Just like in The Matrix, Oracles are powerful. But with great power, comes great responsibility. Just because you're using them in a Noir program doesn't mean they're true. Noir has no superpowers. 
If you want to prove that Portugal won the Euro Cup 2016, you're still relying on potentially untrusted information. + +To give a concrete example, Alice wants to login to the [NounsDAO](https://nouns.wtf/) forum with her username "noir_nouner" by proving she owns a noun without revealing her ethereum address. Her Noir program could have a oracle call like this: + +```rust +#[oracle(getNoun)] +unconstrained fn get_noun(address: Field) -> Field +``` + +This oracle could naively resolve with the number of Nouns she possesses. However, it is useless as a trusted source, as the oracle could resolve to anything Alice wants. In order to make this oracle call actually useful, Alice would need to constrain the response from the oracle, by proving her address and the noun count belongs to the state tree of the contract. + +In short, **Oracles don't prove anything. Your Noir program does.** + +:::danger + +If you don't constrain the return of your oracle, you could be clearly opening an attack vector on your Noir program. Make double-triple sure that the return of an oracle call is constrained! + +::: + +## How to use Oracles + +On CLI, Nargo resolves oracles by making JSON RPC calls, which means it would require an RPC node to be running. + +In JavaScript, NoirJS accepts and resolves arbitrary call handlers (that is, not limited to JSON) as long as they matches the expected types the developer defines. Refer to [Foreign Call Handler](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) to learn more about NoirJS's call handling. + +If you want to build using oracles, follow through to the [oracle guide](../how_to/how-to-oracles.md) for a simple example on how to do that. 
diff --git a/docs/docs/explainers/explainer-recursion.md b/docs/docs/explainers/explainer-recursion.md new file mode 100644 index 00000000000..8f992ec29fd --- /dev/null +++ b/docs/docs/explainers/explainer-recursion.md @@ -0,0 +1,177 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation Objects", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +--- + +In programming, we tend to think of recursion as something calling itself. A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. 
+ +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +As a solution, Alice proposes the following: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". + +She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! 
+ +If the vote collector puts all of the votes into a [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree), everyone can prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He might find it more efficient to generate a proof for that setup phase separately, and verify that proof recursively in the actual business logic section of his circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. + +## What params do I need + +As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires: + +- The proof to verify +- The Verification Key of the circuit that generated the proof +- A hash of this verification key, as it's needed for some backends +- The public inputs for the proof +- The input aggregation object + +It also returns the `output aggregation object`. These aggregation objects can be confusing at times, so let's dive in a little bit. + +### Aggregation objects + +Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. + +In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). 
The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points. + +So, taking the example of Alice and Bob and their guessing game: + +- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit +- Bob verifies Alice's proof and makes his own guess. In this circuit, he is verifying a proof, so it needs to output an `aggregation object`: he is generating a recursive proof! +- Alice verifies Bob's *recursive proof*, and uses Bob's `output aggregation object` as the `input aggregation object` in her proof... Which in turn, generates another `output aggregation object`. + +One should notice that when Bob generates his first proof, he has no input aggregation object. Because he is not verifying a recursive proof, he has no `input aggregation object`. In this case, he may use zeros instead. + +We can imagine the `aggregation object` as the baton in a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid. + +## Some architecture + +As with everything in computer science, there's no one-size-fits-all. But there are some patterns that could help understanding and implementing them. To give three examples: + +### Adding some logic to a proof verification + +This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent.
This circuit would be divided in two sections: + +- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify) +- A `guessing` section, which is basically the logic part where the actual guessing happens + +In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on. + +### Aggregating proofs + +In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere. + +To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits: + +- A `main`, non-recursive circuit with some logic +- A `recursive` circuit meant to verify two proofs in one proof + +The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day. + +### Recursively verifying different circuits + +Nothing prevents you from verifying different circuits in a recursive proof, for example: + +- A `circuit1` circuit +- A `circuit2` circuit +- A `recursive` circuit + +In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received) + +## How fast is it + +At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. 
So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. diff --git a/docs/docs/getting_started/create_a_project.md b/docs/docs/getting_started/create_a_project.md index 76bed5be9b8..26ff265c389 100644 --- a/docs/docs/getting_started/create_a_project.md +++ b/docs/docs/getting_started/create_a_project.md @@ -69,7 +69,7 @@ x : Field, y : pub Field Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the keyword `pub` (e.g. `y`). To learn more about private and public values, check the -[Data Types](@site/docs/explanations/noir/data_types/index.md) section. +[Data Types](../noir/concepts/data_types/index.md) section. The next line of the program specifies its body: @@ -79,7 +79,7 @@ assert(x != y); The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. -For more Noir syntax, check the [Language Concepts](@site/docs/explanations/noir/comments.md) chapter. +For more Noir syntax, check the [Language Concepts](../noir/concepts/comments.md) chapter. ## Build In/Output Files @@ -139,4 +139,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](@site/docs/getting_started/project_breakdown.md), we will go into more detail on each step performed. +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. diff --git a/docs/docs/getting_started/installation/index.md b/docs/docs/getting_started/installation/index.md index ddb8a250eb4..27eeeca88ed 100644 --- a/docs/docs/getting_started/installation/index.md +++ b/docs/docs/getting_started/installation/index.md @@ -1,7 +1,7 @@ --- title: Nargo Installation description: - nargo is a command line tool for interacting with Noir programs. 
This page is a quick guide on how to install Nargo though the most common and easy method, noirup + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo through the most common and easy method, noirup keywords: [ Nargo Noir @@ -41,5 +41,5 @@ noirup Done. That's it. You should have the latest version working. You can check with `nargo --version`. You can also install nightlies, specific versions -or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +or branches. Check out the [noirup repository](https://github.com/noir-lang/noirup) for more information. diff --git a/docs/docs/getting_started/installation/other_install_methods.md b/docs/docs/getting_started/installation/other_install_methods.md index d90a10103d9..a532f83750e 100644 --- a/docs/docs/getting_started/installation/other_install_methods.md +++ b/docs/docs/getting_started/installation/other_install_methods.md @@ -24,7 +24,7 @@ sidebar_position: 1 ## Installation -The most common method of installing Nargo is through [Noirup](@site/docs/getting_started/installation/index.md) +The most common method of installing Nargo is through [Noirup](./index.md) However, there are other methods for installing Nargo: @@ -83,9 +83,9 @@ Check if the installation was successful by running `nargo --version`. You shoul ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. 
It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. #### Setting up your environment @@ -163,11 +163,11 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. -step 2: Follow the [Noirup instructions](@site/docs/getting_started/installation/index.md). +step 2: Follow the [Noirup instructions](./index.md). 
## Uninstalling Nargo diff --git a/docs/docs/getting_started/project_breakdown.md b/docs/docs/getting_started/project_breakdown.md index 026127b9ed0..c4e2a9ae003 100644 --- a/docs/docs/getting_started/project_breakdown.md +++ b/docs/docs/getting_started/project_breakdown.md @@ -40,7 +40,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" @@ -52,7 +52,7 @@ license = "MIT" ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"} ``` -Nargo.toml for a [workspace](@site/docs/explanations/modules_packages_crates/workspaces.md) will look a bit different. For example: +Nargo.toml for a [workspace](../noir/modules_packages_crates/workspaces.md) will look a bit different. For example: ```toml [workspace] @@ -75,7 +75,7 @@ The package section requires a number of fields including: #### Dependencies section -This is where you will specify any dependencies for your project. See the [Dependencies page](@site/docs/explanations/modules_packages_crates/dependencies.md) for more info. +This is where you will specify any dependencies for your project. See the [Dependencies page](../noir/modules_packages_crates/dependencies.md) for more info. `./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or verifier contract respectively. diff --git a/docs/docs/getting_started/tooling/index.mdx b/docs/docs/getting_started/tooling/index.mdx new file mode 100644 index 00000000000..ac480f3c9f5 --- /dev/null +++ b/docs/docs/getting_started/tooling/index.mdx @@ -0,0 +1,38 @@ +--- +title: Tooling +Description: This section provides information about the various tools and utilities available for Noir development. It covers the Noir playground, IDE tools, Codespaces, and community projects. 
+Keywords: [Noir, Development, Playground, IDE Tools, Language Service Provider, VS Code Extension, Codespaces, noir-starter, Community Projects, Awesome Noir Repository, Developer Tooling] +--- + +Noir is meant to be easy to develop with. For that reason, a number of utilities have been put together to ease the development process as much as feasible in the zero-knowledge world. + +## Playground + +The Noir playground is an easy way to test small ideas, share snippets, and integrate in other websites. You can access it at [play.noir-lang.org](https://play.noir-lang.org). + +## IDE tools + +When you install Nargo, you're also installing a Language Service Provider (LSP), which can be used by IDEs to provide syntax highlighting, codelens, warnings, and more. + +The easiest way to use these tools is by installing the [Noir VS Code extension](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +## Codespaces + +Some Noir repos have leveraged Codespaces in order to ease the development process. You can visit the [noir-starter](https://github.com/noir-lang/noir-starter) for an example. + + + +## GitHub Actions + +You can use `noirup` with GitHub Actions for CI/CD and automated testing. It is as simple as +installing `noirup` and running tests in your GitHub Action `yml` file. + +See the +[config file in the Noir repo](https://github.com/TomAFrench/noir-hashes/blob/master/.github/workflows/noir.yml) for an example usage. + +## Community projects + +As an open-source project, Noir has received many contributions over time. 
Some of them are related with developer tooling, and you can see some of them in [Awesome Noir repository](https://github.com/noir-lang/awesome-noir#dev-tools) diff --git a/docs/docs/getting_started/tooling/testing.md b/docs/docs/getting_started/tooling/testing.md index 868a061200d..d3e0c522473 100644 --- a/docs/docs/getting_started/tooling/testing.md +++ b/docs/docs/getting_started/tooling/testing.md @@ -24,8 +24,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. ### Test fail diff --git a/docs/docs/how_to/how-to-oracles.md b/docs/docs/how_to/how-to-oracles.md new file mode 100644 index 00000000000..0d84d992320 --- /dev/null +++ b/docs/docs/how_to/how-to-oracles.md @@ -0,0 +1,280 @@ +--- +title: How to use Oracles +description: Learn how to use oracles in your Noir program with examples in both Nargo and NoirJS. This guide also covers writing a JSON RPC server and providing custom foreign call handlers for NoirJS. +keywords: + - Noir Programming + - Oracles + - Nargo + - NoirJS + - JSON RPC Server + - Foreign Call Handlers +sidebar_position: 1 +--- + +This guide shows you how to use oracles in your Noir program. For the sake of clarity, it assumes that: + +- You have read the [explainer on Oracles](../explainers/explainer-oracle.md) and are comfortable with the concept. +- You have a Noir program to add oracles to. You can create one using the [vite-hardhat starter](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat) as a boilerplate. +- You understand the concept of a JSON-RPC server. 
Visit the [JSON-RPC website](https://www.jsonrpc.org/) if you need a refresher. +- You are comfortable with server-side JavaScript (e.g. Node.js, managing packages, etc.). + +For reference, you can find the snippets used in this tutorial on the [Aztec DevRel Repository](https://github.com/AztecProtocol/dev-rel/tree/main/code-snippets/how-to-oracles). + +## Rundown + +This guide has 3 major steps: + +1. How to modify our Noir program to make use of oracle calls as unconstrained functions +2. How to write a JSON RPC Server to resolve these oracle calls with Nargo +3. How to use them in Nargo and how to provide a custom resolver in NoirJS + +## Step 1 - Modify your Noir program + +An oracle is defined in a Noir program by defining two methods: + +- An unconstrained method - This tells the compiler that it is executing an [unconstrained function](../noir/concepts/unconstrained.md). +- A decorated oracle method - This tells the compiler that this method is an RPC call. + +An example of an oracle that returns a `Field` would be: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(number: Field) -> Field { } + +unconstrained fn get_sqrt(number: Field) -> Field { + sqrt(number) +} +``` + +In this example, we're wrapping our oracle function in an unconstrained method, and decorating it with `oracle(getSqrt)`. We can then call the unconstrained function as we would call any other function: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); +} +``` + +In the next section, we will make this `getSqrt` (defined on the `sqrt` decorator) be a method of the RPC server Noir will use. + +:::danger + +As explained in the [Oracle Explainer](../explainers/explainer-oracle.md), this `main` function is unsafe unless you constrain its return value. For example: + +```rust +fn main(input: Field) { + let sqrt = get_sqrt(input); + assert(sqrt.pow_32(2) as u64 == input as u64); // <---- constrain the return of an oracle!
+} +``` + +::: + +:::info + +Currently, oracles only work with single params or array params. For example: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt([Field; 2]) -> [Field; 2] { } +``` + +::: + +## Step 2 - Write an RPC server + +Brillig will call *one* RPC server. Most likely you will have to write your own, and you can do it in whatever language you prefer. In this guide, we will do it in Javascript. + +Let's use the above example of an oracle that consumes an array with two `Field` and returns their square roots: + +```rust +#[oracle(getSqrt)] +unconstrained fn sqrt(input: [Field; 2]) -> [Field; 2] { } + +unconstrained fn get_sqrt(input: [Field; 2]) -> [Field; 2] { + sqrt(input) +} + +fn main(input: [Field; 2]) { + let sqrt = get_sqrt(input); + assert(sqrt[0].pow_32(2) as u64 == input[0] as u64); + assert(sqrt[1].pow_32(2) as u64 == input[1] as u64); +} +``` + +:::info + +Why square root? + +In general, computing square roots is computationally more expensive than multiplications, which takes a toll when speaking about ZK applications. In this case, instead of calculating the square root in Noir, we are using our oracle to offload that computation to be made in plain. In our circuit we can simply multiply the two values. 
+ +::: + +Now, we should write the corresponding RPC server, starting with the [default JSON-RPC 2.0 boilerplate](https://www.npmjs.com/package/json-rpc-2.0#example): + +```js +import { JSONRPCServer } from "json-rpc-2.0"; +import express from "express"; +import bodyParser from "body-parser"; + +const app = express(); +app.use(bodyParser.json()); + +const server = new JSONRPCServer(); +app.post("/", (req, res) => { + const jsonRPCRequest = req.body; + server.receive(jsonRPCRequest).then((jsonRPCResponse) => { + if (jsonRPCResponse) { + res.json(jsonRPCResponse); + } else { + res.sendStatus(204); + } + }); +}); + +app.listen(5555); +``` + +Now, we will add our `getSqrt` method, as expected by the `#[oracle(getSqrt)]` decorator in our Noir code. It maps through the params array and returns their square roots: + +```js +server.addMethod("getSqrt", async (params) => { + const values = params[0].Array.map(({ inner }) => { + return { inner: `${Math.sqrt(parseInt(inner, 16))}` }; + }); + return { values: [{ Array: values }] }; +}); +``` + +:::tip + +Brillig expects an object with an array of values. Each value is an object declaring to be `Single` or `Array` and returning an `inner` property *as a string*.
For example: + +```json +{ "values": [{ "Array": [{ "inner": "1" }, { "inner": "2"}]}]} +{ "values": [{ "Single": { "inner": "1" }}]} +{ "values": [{ "Single": { "inner": "1" }}, { "Array": [{ "inner": "1" }, { "inner": "2" }]}]} +``` + +If you're using Typescript, the following types may be helpful in understanding the expected return value and making sure they're easy to follow: + +```js +interface Value { + inner: string, +} + +interface SingleForeignCallParam { + Single: Value, +} + +interface ArrayForeignCallParam { + Array: Value[], +} + +type ForeignCallParam = SingleForeignCallParam | ArrayForeignCallParam; + +interface ForeignCallResult { + values: ForeignCallParam[], +} +``` + +::: + +## Step 3 - Usage with Nargo + +Using the [`nargo` CLI tool](../getting_started/installation/index.md), you can use oracles in the `nargo test`, `nargo execute` and `nargo prove` commands by passing a value to `--oracle-resolver`. For example: + +```bash +nargo test --oracle-resolver http://localhost:5555 +``` + +This tells `nargo` to use your RPC Server URL whenever it finds an oracle decorator. + +## Step 4 - Usage with NoirJS + +In a JS environment, an RPC server is not strictly necessary, as you may want to resolve your oracles without needing any JSON call at all. NoirJS simply expects that you pass a callback function when you generate proofs, and that callback function can be anything. + +For example, if your Noir program expects the host machine to provide CPU pseudo-randomness, you could simply pass it as the `foreignCallHandler`.
You don't strictly need to create an RPC server to serve pseudo-randomness, as you may as well get it directly in your app: + +```js +const foreignCallHandler = (name, inputs) => crypto.randomBytes(16) // etc + +await noir.generateFinalProof(inputs, foreignCallHandler) +``` + +As one can see, in NoirJS, the [`foreignCallHandler`](../reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md) function simply means "a callback function that returns a value of type [`ForeignCallOutput`](../reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md). It doesn't have to be an RPC call like in the case for Nargo. + +:::tip + +Does this mean you don't have to write an RPC server like in [Step #2](#step-2---write-an-rpc-server)? + +You don't technically have to, but then how would you run `nargo test` or `nargo prove`? To use both `Nargo` and `NoirJS` in your development flow, you will have to write a JSON RPC server. + +::: + +In this case, let's make `foreignCallHandler` call the JSON RPC Server we created in [Step #2](#step-2---write-an-rpc-server), by making it a JSON RPC Client. 
+ +For example, using the same `getSqrt` program in [Step #1](#step-1---modify-your-noir-program) (comments in the code): + +```js +import { JSONRPCClient } from "json-rpc-2.0"; + +// declaring the JSONRPCClient +const client = new JSONRPCClient((jsonRPCRequest) => { +// hitting the same JSON RPC Server we coded above + return fetch("http://localhost:5555", { + method: "POST", + headers: { + "content-type": "application/json", + }, + body: JSON.stringify(jsonRPCRequest), + }).then((response) => { + if (response.status === 200) { + return response + .json() + .then((jsonRPCResponse) => client.receive(jsonRPCResponse)); + } else if (jsonRPCRequest.id !== undefined) { + return Promise.reject(new Error(response.statusText)); + } + }); +}); + +// declaring a function that takes the name of the foreign call (getSqrt) and the inputs +const foreignCallHandler = async (name, input) => { + // notice that the "inputs" parameter contains *all* the inputs + // in this case we want to make the RPC request with the first parameter "numbers", which would be input[0] + const oracleReturn = await client.request(name, [ + { Array: input[0].map((i) => ({ inner: i.toString("hex") })) }, + ]); + return [oracleReturn.values[0].Array.map((x) => x.inner)]; +}; + +// the rest of your NoirJS code +const input = { input: [4, 16] }; +const { witness } = await noir.execute(input, foreignCallHandler); +``` + +:::tip + +If you're in a NoirJS environment running your RPC server together with a frontend app, you'll probably hit a familiar problem in full-stack development: requests being blocked by [CORS](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS) policy.
For development only, you can simply install and use the [`cors` npm package](https://www.npmjs.com/package/cors) to get around the problem: + +```bash +yarn add cors +``` + +and use it as a middleware: + +```js +import cors from "cors"; + +const app = express(); +app.use(cors()) +``` + +::: + +## Conclusion + +Hopefully by the end of this guide, you should be able to: + +- Write your own logic around Oracles and how to write a JSON RPC server to make them work with your Nargo commands. +- Provide custom foreign call handlers for NoirJS. diff --git a/docs/docs/how_to/how-to-recursion.md b/docs/docs/how_to/how-to-recursion.md new file mode 100644 index 00000000000..39db23f1f3a --- /dev/null +++ b/docs/docs/how_to/how-to-recursion.md @@ -0,0 +1,184 @@ +--- +title: How to use recursion on NoirJS +description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`. +keywords: + [ + "NoirJS", + "EVM blockchain", + "smart contracts", + "recursion", + "solidity verifiers", + "Barretenberg backend", + "noir_js", + "backend_barretenberg", + "intermediate proofs", + "final proofs", + "nargo compile", + "json import", + "recursive circuit", + "recursive app" + ] +sidebar_position: 1 +--- + +This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that: + +- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md). 
+- You are familiar with what recursive proofs are and you have read the [recursion explainer](../explainers/explainer-recursion.md) +- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works. + +It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`. + +:::info + +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. This means that it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. + +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. This means that these proofs need to be created by using the backend directly. + +In short: + +- `noir_js` generates *only* final proofs +- `backend_barretenberg` generates both types of proofs + +::: + +In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume these two: + +- `main`: a circuit of type `assert(x != y)` +- `recursive`: a circuit that verifies `main` + +For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. + +## Step 1: Setup + +In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is to `execute` a circuit and get its witness and return value.
Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. + +```js +const noir = new Noir(circuit, backend) +const { witness } = noir.execute(input) +``` + +With this witness, you are now able to generate the intermediate proof for the main circuit: + +```js +const { proof, publicInputs } = await backend.generateIntermediateProof(witness) +``` + +:::warning + +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit we are actually running and why! + +In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. + +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*, so it must be Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. + +::: + +## Step 3 - Verification and proof artifacts + +Optionally, you are able to verify the intermediate proof: + +```js +const verified = await backend.verifyIntermediateProof({ proof, publicInputs }) +``` + +This can be useful to make sure our intermediate proof was correctly generated. 
But the real goal is to do it within another circuit. For that, we need to generate the intermediate artifacts: + +```js +const { proofAsFields, vkAsFields, vkHash } = await backend.generateIntermediateProofArtifacts( { publicInputs, proof }, publicInputsCount) +``` + +This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. + +:::info + +The `proofAsFields` has a constant size `[Field; 93]`. However, currently the backend doesn't remove the public inputs from the proof when converting it. + +This means that if your `main` circuit has two public inputs, then you should also modify the recursive circuit to accept a proof with the public inputs appended. In our example, since `y` is a public input, our `proofAsFields` is of type `[Field; 94]`. + +Verification keys in Barretenberg are always of size 114. + +::: + +:::warning + +One common mistake is to forget *who* makes this call. + +In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, who gladly takes them as true, this would mean Alice could prove anything! + +Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit. + +::: + +## Step 4 - Recursive proof generation + +With the artifacts, generating a recursive proof is no different from a normal proof.
You simply use the `backend` (with the recursive circuit) to generate it: + +```js +const recursiveInputs = { + verification_key: vkAsFields, // array of length 114 + proof: proofAsFields, // array of length 93 + size of public inputs + publicInputs: [mainInput.y], // using the example above, where `y` is the only public input + key_hash: vkHash, + input_aggregation_object: Array(16).fill(0) // this circuit is verifying a non-recursive proof, so there's no input aggregation object: just use zero +} + +const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! +const { proof, publicInputs } = backend.generateFinalProof(witness) +const verified = backend.verifyFinalProof({ proof, publicInputs }) +``` + +You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested in using them this way! In that case, you should keep in mind the `returnValue`, as it will contain the `input_aggregation_object` for the next proof. + +:::tip + +Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object.
For example: + +```js +const circuits = { +main: mainJSON, +recursive: recursiveJSON +} +const backends = { +main: new BarretenbergBackend(circuits.main), +recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { +main: new Noir(circuits.main, backends.main), +recursive: new Noir(circuits.recursive, backends.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateIntermediateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyIntermediateProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateIntermediateProofArtifacts( + proof, + numPublicInputs, +); +const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) +``` + +::: diff --git a/docs/docs/how_to/03_solidity_verifier.md b/docs/docs/how_to/solidity_verifier.md similarity index 98% rename from docs/docs/how_to/03_solidity_verifier.md rename to docs/docs/how_to/solidity_verifier.md index 1a89fe492f4..8022b0e5f20 100644 --- a/docs/docs/how_to/03_solidity_verifier.md +++ b/docs/docs/how_to/solidity_verifier.md @@ -3,7 +3,7 @@ title: Generate a Solidity Verifier description: Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart - contract. Read more to find out! + contract. 
Read more to find out keywords: [ solidity verifier, @@ -16,6 +16,7 @@ keywords: proving backend, Barretenberg, ] +sidebar_position: 0 --- For certain applications, it may be desirable to run the verifier as a smart contract instead of on diff --git a/docs/docs/how_to/using-devcontainers.mdx b/docs/docs/how_to/using-devcontainers.mdx new file mode 100644 index 00000000000..727ec6ca667 --- /dev/null +++ b/docs/docs/how_to/using-devcontainers.mdx @@ -0,0 +1,110 @@ +--- +title: Developer Containers and Codespaces +description: "Learn how to set up a devcontainer in your GitHub repository for a seamless coding experience with Codespaces. Follow our easy 8-step guide to create your own Noir environment without installing Nargo locally." +keywords: ["Devcontainer", "Codespaces", "GitHub", "Noir Environment", "Docker Image", "Development Environment", "Remote Coding", "GitHub Codespaces", "Noir Programming", "Nargo", "VSCode Extensions", "Noirup"] +sidebar_position: 1 +--- + +Adding a developer container configuration file to your Noir project is one of the easiest ways to unlock coding in the browser. + +## What's a devcontainer after all? + +A [Developer Container](https://containers.dev/) (devcontainer for short) is a Docker image that comes preloaded with tools, extensions, and other resources you need to quickly get started or continue a project, without having to install Nargo locally. Think of it as a development environment in a box. + +There are many advantages to this: + +- It's platform and architecture agnostic +- You don't need to have an IDE installed, or Nargo, or use a terminal at all +- It's safer for using on a public machine or public network + +One of the best ways of using devcontainers is... not using your machine at all, for maximum control, performance, and ease of use. +Enter Codespaces.
+ +## Codespaces + +If a devcontainer is just a Docker image, then what stops you from provisioning a `p3dn.24xlarge` AWS EC2 instance with 92 vCPUs and 768 GiB RAM and using it to prove your 10-gate SNARK proof? + +Nothing! Except perhaps the $30-40 per hour it will cost you. + +The problem is that provisioning takes time, and I bet you don't want to see the AWS console every time you want to code something real quick. + +Fortunately, there's an easy and free way to get a decent remote machine ready and loaded in less than 2 minutes: Codespaces. [Codespaces is a GitHub feature](https://github.com/features/codespaces) that allows you to code in a remote machine by using devcontainers, and it's pretty cool: + +- You can start coding Noir in less than a minute +- It uses the resources of a remote machine, so you can code on your grandma's phone if need be +- It makes it easy to share work with your frens +- It's fully reusable, you can stop and restart whenever you need to + +:::info + +Don't take out your wallet just yet. Free GitHub accounts get about [15-60 hours of coding](https://github.com/features/codespaces) for free per month, depending on the size of your provisioned machine. + +::: + +## Tell me it's _actually_ easy + +It is! + +GitHub comes with a default codespace and you can use it to code your own devcontainer. That's exactly what we will be doing in this guide. + + + +8 simple steps: + +#### 1. Create a new repository on GitHub. + +#### 2. Click "Start coding with Codespaces". This will use the default image. + +#### 3. Create a folder called `.devcontainer` in the root of your repository. + +#### 4.
Create a Dockerfile in that folder, and paste the following code: + +```docker +FROM --platform=linux/amd64 node:lts-bookworm-slim +SHELL ["/bin/bash", "-c"] +RUN apt update && apt install -y curl bash git tar gzip libc++-dev +RUN curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +ENV PATH="/root/.nargo/bin:$PATH" +RUN noirup +ENTRYPOINT ["nargo"] +``` +#### 5. Create a file called `devcontainer.json` in the same folder, and paste the following code: + +```json +{ + "name": "Noir on Codespaces", + "build": { + "context": ".", + "dockerfile": "Dockerfile" + }, + "customizations": { + "vscode": { + "extensions": ["noir-lang.vscode-noir"] + } + } +} +``` +#### 6. Commit and push your changes + +This will pull the new image and build it, so it could take a minute or so + +#### 8. Done! +Just wait for the build to finish, and there's your easy Noir environment. + + +Refer to [noir-starter](https://github.com/noir-lang/noir-starter/) as an example of how devcontainers can be used together with codespaces. + + + +## How do I use it? + +Using the codespace is obviously much easier than setting it up. +Just navigate to your repository and click "Code" -> "Open with Codespaces". It should take a few seconds to load, and you're ready to go. + +:::info + +If you really like the experience, you can add a badge to your readme, links to existing codespaces, and more. +Check out the [official docs](https://docs.github.com/en/codespaces/setting-up-your-project-for-codespaces/setting-up-your-repository/facilitating-quick-creation-and-resumption-of-codespaces) for more info. diff --git a/docs/docs/index.md b/docs/docs/index.md index 754f9f6e31d..ab8c2f8acd2 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -24,12 +24,13 @@ sidebar_position: 0 Noir, a domain-specific language crafted for SNARK proving systems, stands out with its simplicity, flexibility, and robust capabilities. 
Unlike conventional approaches that compile directly to a fixed NP-complete language, -Noir takes a two-pronged path. It first compiles to an adaptable intermediate language known as ACIR. From there, -depending on the project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's -barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkwork's Marlin -backend, among others. +Noir takes a two-pronged path. First, Noir compiles to an adaptable intermediate language known as ACIR. -This innovative design introduces unique challenges, yet it strategically separates the programming language from the +From there, depending on a given project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's +barretenberg backend, or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin +backend (among others). + +This innovative design introduces unique challenges; however, this approach also strategically separates the programming language from the backend. Noir's approach echoes the modular philosophy of LLVM, offering developers a versatile toolkit for cryptographic programming. @@ -38,13 +39,12 @@ programming. ### Solidity Developers Noir streamlines the creation of Solidity contracts that interface with SNARK systems. -[`Utilize the nargo codegen-verifier`](@site/docs/reference/nargo_commands.md#nargo-codegen-verifier) command to construct verifier +[`Utilize the nargo codegen-verifier`](./reference/nargo_commands.md#nargo-codegen-verifier) command to construct verifier contracts efficiently. While the current alpha version offers this as a direct feature, future updates aim to modularize this process for even greater ease of use. Noir currently includes a command to create a Solidity contract which verifies your Noir program. 
This will be -modularised in the future; however, as of the alpha, you can use the - command to create a verifier contract. +modularized in the future; however, as of the alpha, you can use the `nargo codegen-verifier` command to create a verifier contract. ### Protocol Developers @@ -62,7 +62,7 @@ within your projects. ## Libraries -Noir does not currently have an official package manager. You can find a list of available Noir libraries in the +Noir does not currently have an official package manager. You can find a list of some of the available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). Some libraries that are available today include: @@ -82,4 +82,4 @@ Some libraries that are available today include: - [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers -See the section on [dependencies](@site/docs/explanations/modules_packages_crates/dependencies.md) for more information. +See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for more information. diff --git a/docs/docs/migration_notes.md b/docs/docs/migration_notes.md index a5fd10769f7..9f27230a1a0 100644 --- a/docs/docs/migration_notes.md +++ b/docs/docs/migration_notes.md @@ -16,7 +16,7 @@ To update, please make sure this field in `Nargo.toml` matches the output of `na ## ≥0.14 -The index of the [for loops](@site/docs/explanations/noir/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: +The index of the [for loops](noir/concepts/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: ```rust for i in 0..10 { @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. 
This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/docs/noir/concepts/_category_.json b/docs/docs/noir/concepts/_category_.json new file mode 100644 index 00000000000..7da08f8a8c5 --- /dev/null +++ b/docs/docs/noir/concepts/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Concepts", + "position": 0, + "collapsible": true, + "collapsed": true +} \ No newline at end of file diff --git a/docs/docs/explanations/noir/assert.md b/docs/docs/noir/concepts/assert.md similarity index 100% rename from docs/docs/explanations/noir/assert.md rename to docs/docs/noir/concepts/assert.md diff --git a/docs/docs/noir/concepts/comments.md b/docs/docs/noir/concepts/comments.md new file mode 100644 index 00000000000..b51a85f5c94 --- /dev/null +++ b/docs/docs/noir/concepts/comments.md @@ -0,0 +1,33 @@ +--- +title: Comments +description: + Learn how to write comments in Noir programming language. A comment is a line of code that is + ignored by the compiler, but it can be read by programmers. Single-line and multi-line comments + are supported in Noir. +keywords: [Noir programming language, comments, single-line comments, multi-line comments] +sidebar_position: 10 +--- + +A comment is a line in your codebase which the compiler ignores, however it can be read by +programmers. 
+ +Here is a single line comment: + +```rust +// This is a comment and is ignored +``` + +`//` is used to tell the compiler to ignore the rest of the line. + +Noir also supports multi-line block comments. Start a block comment with `/*` and end the block with `*/`. + +Noir does not natively support doc comments. You may be able to use [Rust doc comments](https://doc.rust-lang.org/reference/comments.html) in your code to leverage some Rust documentation build tools with Noir code. + +```rust +/* + This is a block comment describing a complex function. +*/ +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` diff --git a/docs/docs/explanations/noir/control_flow.md b/docs/docs/noir/concepts/control_flow.md similarity index 100% rename from docs/docs/explanations/noir/control_flow.md rename to docs/docs/noir/concepts/control_flow.md diff --git a/docs/docs/noir/concepts/data_bus.md b/docs/docs/noir/concepts/data_bus.md new file mode 100644 index 00000000000..e54fc861257 --- /dev/null +++ b/docs/docs/noir/concepts/data_bus.md @@ -0,0 +1,21 @@ +--- +title: Data Bus +sidebar_position: 13 +--- +**Disclaimer** this feature is experimental, do not use it! + +The data bus is an optimization that the backend can use to make recursion more efficient. +In order to use it, you must define some inputs of the program entry points (usually the `main()` +function) with the `call_data` modifier, and the return values with the `return_data` modifier. +These modifiers are incompatible with `pub` and `mut` modifiers. + +## Example + +```rust +fn main(mut x: u32, y: call_data u32, z: call_data [u32;4] ) -> return_data u32 { + let a = z[x]; + a+y +} +``` + +As a result, both call_data and return_data will be treated as private inputs and encapsulated into a read-only array each, for the backend to process. 
diff --git a/docs/docs/explanations/noir/data_types/_category_.json b/docs/docs/noir/concepts/data_types/_category_.json similarity index 100% rename from docs/docs/explanations/noir/data_types/_category_.json rename to docs/docs/noir/concepts/data_types/_category_.json diff --git a/docs/docs/noir/concepts/data_types/arrays.md b/docs/docs/noir/concepts/data_types/arrays.md new file mode 100644 index 00000000000..7f275a2d771 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/arrays.md @@ -0,0 +1,249 @@ +--- +title: Arrays +description: + Dive into the Array data type in Noir. Grasp its methods, practical examples, and best practices for efficiently using Arrays in your Noir code. +keywords: + [ + noir, + array type, + methods, + examples, + indexing, + ] +sidebar_position: 4 +--- + +An array is one way of grouping together values into one compound type. Array types can be inferred +or explicitly specified via the syntax `[; ]`: + +```rust +fn main(x : Field, y : Field) { + let my_arr = [x, y]; + let your_arr: [Field; 2] = [x, y]; +} +``` + +Here, both `my_arr` and `your_arr` are instantiated as an array containing two `Field` elements. + +Array elements can be accessed using indexing: + +```rust +fn main() { + let a = [1, 2, 3, 4, 5]; + + let first = a[0]; + let second = a[1]; +} +``` + +All elements in an array must be of the same type (i.e. homogeneous). That is, an array cannot group +a `Field` value and a `u8` value together for example. + +You can write mutable arrays, like: + +```rust +fn main() { + let mut arr = [1, 2, 3, 4, 5]; + assert(arr[0] == 1); + + arr[0] = 42; + assert(arr[0] == 42); +} +``` + +You can instantiate a new array of a fixed size with the same value repeated for each element. The following example instantiates an array of length 32 where each element is of type Field and has the value 0. + +```rust +let array: [Field; 32] = [0; 32]; +``` + +Like in Rust, arrays in Noir are a fixed size. 
However, if you wish to convert an array to a [slice](./slices), you can just call `as_slice` on your array: + +```rust +let array: [Field; 32] = [0; 32]; +let sl = array.as_slice() +``` + +You can define multidimensional arrays: + +```rust +let array : [[Field; 2]; 2]; +let element = array[0][0]; +``` +However, multidimensional slices are not supported. For example, the following code will error at compile time: +```rust +let slice : [[Field]] = []; +``` + +## Types + +You can create arrays of primitive types or structs. There is not yet support for nested arrays +(arrays of arrays) or arrays of structs that contain arrays. + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for arrays: + +### len + +Returns the length of an array + +```rust +fn len(_array: [T; N]) -> comptime Field +``` + +example + +```rust +fn main() { + let array = [42, 42]; + assert(array.len() == 2); +} +``` + +### sort + +Returns a new sorted array. The original array remains untouched. Notice that this function will +only work for arrays of fields or integers, not for any arbitrary type. This is because the sorting +logic it uses internally is optimized specifically for these values. If you need a sort function to +sort any type, you should use the function `sort_via` described below. 
+ +```rust +fn sort(_array: [T; N]) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32]; + let sorted = arr.sort(); + assert(sorted == [32, 42]); +} +``` + +### sort_via + +Sorts the array with a custom comparison function + +```rust +fn sort_via(mut a: [T; N], ordering: fn(T, T) -> bool) -> [T; N] +``` + +example + +```rust +fn main() { + let arr = [42, 32] + let sorted_ascending = arr.sort_via(|a, b| a < b); + assert(sorted_ascending == [32, 42]); // verifies + + let sorted_descending = arr.sort_via(|a, b| a > b); + assert(sorted_descending == [32, 42]); // does not verify +} +``` + +### map + +Applies a function to each element of the array, returning a new array containing the mapped elements. + +```rust +fn map(f: fn(T) -> U) -> [U; N] +``` + +example + +```rust +let a = [1, 2, 3]; +let b = a.map(|a| a * 2); // b is now [2, 4, 6] +``` + +### fold + +Applies a function to each element of the array, returning the final accumulated value. The first +parameter is the initial value. + +```rust +fn fold(mut accumulator: U, f: fn(U, T) -> U) -> U +``` + +This is a left fold, so the given function will be applied to the accumulator and first element of +the array, then the second, and so on. For a given call the expected result would be equivalent to: + +```rust +let a1 = [1]; +let a2 = [1, 2]; +let a3 = [1, 2, 3]; + +let f = |a, b| a - b; +a1.fold(10, f) //=> f(10, 1) +a2.fold(10, f) //=> f(f(10, 1), 2) +a3.fold(10, f) //=> f(f(f(10, 1), 2), 3) +``` + +example: + +```rust + +fn main() { + let arr = [2, 2, 2, 2, 2]; + let folded = arr.fold(0, |a, b| a + b); + assert(folded == 10); +} + +``` + +### reduce + +Same as fold, but uses the first element as starting element. 
+ +```rust +fn reduce(f: fn(T, T) -> T) -> T +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let reduced = arr.reduce(|a, b| a + b); + assert(reduced == 10); +} +``` + +### all + +Returns true if all the elements satisfy the given predicate + +```rust +fn all(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 2]; + let all = arr.all(|a| a == 2); + assert(all); +} +``` + +### any + +Returns true if any of the elements satisfy the given predicate + +```rust +fn any(predicate: fn(T) -> bool) -> bool +``` + +example: + +```rust +fn main() { + let arr = [2, 2, 2, 2, 5]; + let any = arr.any(|a| a == 5); + assert(any); +} + +``` diff --git a/docs/docs/explanations/noir/data_types/booleans.md b/docs/docs/noir/concepts/data_types/booleans.md similarity index 100% rename from docs/docs/explanations/noir/data_types/booleans.md rename to docs/docs/noir/concepts/data_types/booleans.md diff --git a/docs/docs/explanations/noir/data_types/fields.md b/docs/docs/noir/concepts/data_types/fields.md similarity index 100% rename from docs/docs/explanations/noir/data_types/fields.md rename to docs/docs/noir/concepts/data_types/fields.md diff --git a/docs/docs/explanations/noir/data_types/function_types.md b/docs/docs/noir/concepts/data_types/function_types.md similarity index 100% rename from docs/docs/explanations/noir/data_types/function_types.md rename to docs/docs/noir/concepts/data_types/function_types.md diff --git a/docs/docs/explanations/noir/data_types/index.md b/docs/docs/noir/concepts/data_types/index.md similarity index 95% rename from docs/docs/explanations/noir/data_types/index.md rename to docs/docs/noir/concepts/data_types/index.md index 56ed55c444b..3c9cd4c2437 100644 --- a/docs/docs/explanations/noir/data_types/index.md +++ b/docs/docs/noir/concepts/data_types/index.md @@ -79,7 +79,7 @@ fn main() { } ``` -Type aliases can also be used with [generics](@site/docs/explanations/noir/generics.md): +Type 
aliases can also be used with [generics](@site/docs/noir/concepts/generics.md): ```rust type Id = Size; @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/docs/explanations/noir/data_types/integers.md b/docs/docs/noir/concepts/data_types/integers.md similarity index 100% rename from docs/docs/explanations/noir/data_types/integers.md rename to docs/docs/noir/concepts/data_types/integers.md diff --git a/docs/docs/explanations/noir/data_types/references.md b/docs/docs/noir/concepts/data_types/references.md similarity index 100% rename from docs/docs/explanations/noir/data_types/references.md rename to docs/docs/noir/concepts/data_types/references.md diff --git a/docs/docs/explanations/noir/data_types/slices.mdx b/docs/docs/noir/concepts/data_types/slices.mdx similarity index 100% rename from docs/docs/explanations/noir/data_types/slices.mdx rename to docs/docs/noir/concepts/data_types/slices.mdx diff --git a/docs/docs/noir/concepts/data_types/strings.md b/docs/docs/noir/concepts/data_types/strings.md new file mode 100644 index 00000000000..311dfd64416 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/strings.md @@ -0,0 +1,80 @@ +--- +title: Strings +description: + Discover the String data type in Noir. Learn about its methods, see real-world examples, and understand how to effectively manipulate and use Strings in Noir. +keywords: + [ + noir, + string type, + methods, + examples, + concatenation, + ] +sidebar_position: 3 +--- + + +The string type is a fixed length value defined with `str`. + +You can use strings in `assert()` functions or print them with +`println()`. See more about [Logging](../../standard_library/logging). 
+ +```rust +use dep::std; + +fn main(message : pub str<11>, hex_as_string : str<4>) { + println(message); + assert(message == "hello world"); + assert(hex_as_string == "0x41"); +} +``` + +You can convert a `str` to a byte array by calling `as_bytes()` +or a vector by calling `as_bytes_vec()`. + +```rust +fn main() { + let message = "hello world"; + let message_bytes = message.as_bytes(); + let mut message_vec = message.as_bytes_vec(); + assert(message_bytes.len() == 11); + assert(message_bytes[0] == 104); + assert(message_bytes[0] == message_vec.get(0)); +} +``` + +## Escape characters + +You can use escape characters for your strings: + +| Escape Sequence | Description | +|-----------------|-----------------| +| `\r` | Carriage Return | +| `\n` | Newline | +| `\t` | Tab | +| `\0` | Null Character | +| `\"` | Double Quote | +| `\\` | Backslash | + +Example: + +```rust +let s = "Hello \"world" // prints "Hello "world" +let s = "hey \tyou"; // prints "hey you" +``` + +## Raw strings + +A raw string begins with the letter `r` and is optionally delimited by a number of hashes `#`. + +Escape characters are *not* processed within raw strings. All contents are interpreted literally. 
+ +Example: + +```rust +let s = r"Hello world"; +let s = r#"Simon says "hello world""#; + +// Any number of hashes may be used (>= 1) as long as the string also terminates with the same number of hashes +let s = r#####"One "#, Two "##, Three "###, Four "####, Five will end the string."#####; +``` diff --git a/docs/docs/explanations/noir/data_types/structs.md b/docs/docs/noir/concepts/data_types/structs.md similarity index 100% rename from docs/docs/explanations/noir/data_types/structs.md rename to docs/docs/noir/concepts/data_types/structs.md diff --git a/docs/docs/explanations/noir/data_types/tuples.md b/docs/docs/noir/concepts/data_types/tuples.md similarity index 100% rename from docs/docs/explanations/noir/data_types/tuples.md rename to docs/docs/noir/concepts/data_types/tuples.md diff --git a/docs/docs/noir/concepts/data_types/vectors.mdx b/docs/docs/noir/concepts/data_types/vectors.mdx new file mode 100644 index 00000000000..aed13183719 --- /dev/null +++ b/docs/docs/noir/concepts/data_types/vectors.mdx @@ -0,0 +1,171 @@ +--- +title: Vectors +description: Delve into the Vector data type in Noir. Learn about its methods, practical examples, and best practices for using Vectors in your Noir code. +keywords: [noir, vector type, methods, examples, dynamic arrays] +sidebar_position: 6 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A vector is a collection type similar to Rust's Vector type. It's convenient way to use slices as mutable arrays. + +Example: + +```rust +let mut vector: Vec = Vec::new(); +for i in 0..5 { + vector.push(i); +} +assert(vector.len() == 5); +``` + +## Methods + +### new + +Creates a new, empty vector. + +```rust +pub fn new() -> Self { + Self { slice: [] } +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` + +### from_slice + +Creates a vector containing each element from a given slice. 
Mutations to the resulting vector will not affect the original slice. + +```rust +pub fn from_slice(slice: [T]) -> Self { + Self { slice } +} +``` + +Example: + +```rust +let arr: [Field] = [1, 2, 3]; +let vector_from_slice = Vec::from_slice(arr); +assert(vector_from_slice.len() == 3); +``` + +### get + +Retrieves an element from the vector at a given index. Panics if the index points beyond the vector's end. + +```rust +pub fn get(self, index: Field) -> T { + self.slice[index] +} +``` + +Example: + +```rust +let vector: Vec = Vec::from_slice([10, 20, 30]); +assert(vector.get(1) == 20); +``` + +### push + +Adds a new element to the vector's end, returning a new vector with a length one greater than the original unmodified vector. + +```rust +pub fn push(&mut self, elem: T) { + self.slice = self.slice.push_back(elem); +} +``` + +Example: + +```rust +let mut vector: Vec = Vec::new(); +vector.push(10); +assert(vector.len() == 1); +``` + +### pop + +Removes an element from the vector's end, returning a new vector with a length one less than the original vector, along with the removed element. Panics if the vector's length is zero. + +```rust +pub fn pop(&mut self) -> T { + let (popped_slice, last_elem) = self.slice.pop_back(); + self.slice = popped_slice; + last_elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20]); +let popped_elem = vector.pop(); +assert(popped_elem == 20); +assert(vector.len() == 1); +``` + +### insert + +Inserts an element at a specified index, shifting subsequent elements to the right. + +```rust +pub fn insert(&mut self, index: Field, elem: T) { + self.slice = self.slice.insert(index, elem); +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 30]); +vector.insert(1, 20); +assert(vector.get(1) == 20); +``` + +### remove + +Removes an element at a specified index, shifting subsequent elements to the left, and returns the removed element. 
+ +```rust +pub fn remove(&mut self, index: Field) -> T { + let (new_slice, elem) = self.slice.remove(index); + self.slice = new_slice; + elem +} +``` + +Example: + +```rust +let mut vector = Vec::from_slice([10, 20, 30]); +let removed_elem = vector.remove(1); +assert(removed_elem == 20); +assert(vector.len() == 2); +``` + +### len + +Returns the number of elements in the vector. + +```rust +pub fn len(self) -> Field { + self.slice.len() +} +``` + +Example: + +```rust +let empty_vector: Vec = Vec::new(); +assert(empty_vector.len() == 0); +``` diff --git a/docs/docs/noir/concepts/distinct.md b/docs/docs/noir/concepts/distinct.md new file mode 100644 index 00000000000..6c993b8b5e0 --- /dev/null +++ b/docs/docs/noir/concepts/distinct.md @@ -0,0 +1,64 @@ +--- +title: Distinct Witnesses +sidebar_position: 11 +--- + +The `distinct` keyword prevents repetitions of witness indices in the program's ABI. This ensures +that the witnesses being returned as public inputs are all unique. + +The `distinct` keyword is only used for return values on program entry points (usually the `main()` +function). + +When using `distinct` and `pub` simultaneously, `distinct` comes first. See the example below. + +You can read more about the problem this solves +[here](https://github.com/noir-lang/noir/issues/1183). + +## Example + +Without the `distinct` keyword, the following program + +```rust +fn main(x : pub Field, y : pub Field) -> pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + "return_witnesses": [3, 2, 4, 4] + } +} +``` + +Whereas (with the `distinct` keyword) + +```rust +fn main(x : pub Field, y : pub Field) -> distinct pub [Field; 4] { + let a = 1; + let b = 1; + [x + 1, y, a, b] +} +``` + +compiles to + +```json +{ + //... + "abi": { + //... + "param_witnesses": { "x": [1], "y": [2] }, + //... 
+ "return_witnesses": [3, 4, 5, 6] + } +} +``` diff --git a/docs/docs/explanations/noir/functions.md b/docs/docs/noir/concepts/functions.md similarity index 92% rename from docs/docs/explanations/noir/functions.md rename to docs/docs/noir/concepts/functions.md index 94f929038ee..48aba9cd058 100644 --- a/docs/docs/explanations/noir/functions.md +++ b/docs/docs/noir/concepts/functions.md @@ -15,7 +15,7 @@ To declare a function the `fn` keyword is used. fn foo() {} ``` -By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](@site/docs/explanations/modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: +By default, functions are visible only within the package they are defined. To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: ```rust pub fn foo() {} @@ -62,7 +62,7 @@ fn main(x : [Field]) // can't compile, has variable size fn main(....// i think you got it by now ``` -Keep in mind [tests](@site/docs/getting_started/tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: +Keep in mind [tests](../../getting_started/tooling/testing.md) don't differentiate between `main` and any other function. The following snippet passes tests, but won't compile or prove: ```rust fn main(x : [Field]) { @@ -189,8 +189,8 @@ Supported attributes include: - **builtin**: the function is implemented by the compiler, for efficiency purposes. - **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` - **field**: Used to enable conditional compilation of code depending on the field size. 
See below for more details -- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](@site/docs/reference/NoirJS/noir_js/index.md) for more details. -- **test**: mark the function as unit tests. See [Tests](@site/docs/getting_started/tooling/testing.md) for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../../getting_started/tooling/testing.md) for more details ### Field Attribute diff --git a/docs/docs/noir/concepts/generics.md b/docs/docs/noir/concepts/generics.md new file mode 100644 index 00000000000..ddd42bf1f9b --- /dev/null +++ b/docs/docs/noir/concepts/generics.md @@ -0,0 +1,106 @@ +--- +title: Generics +description: Learn how to use Generics in Noir +keywords: [Noir, Rust, generics, functions, structs] +sidebar_position: 7 +--- + +Generics allow you to use the same functions with multiple different concrete data types. You can +read more about the concept of generics in the Rust documentation +[here](https://doc.rust-lang.org/book/ch10-01-syntax.html). + +Here is a trivial example showing the identity function that supports any type. In Rust, it is +common to refer to the most general type as `T`. We follow the same convention in Noir. + +```rust +fn id(x: T) -> T { + x +} +``` + +## In Structs + +Generics are useful for specifying types in structs. For example, we can specify that a field in a +struct will be of a certain generic type. In this case `value` is of type `T`. + +```rust +struct RepeatedValue { + value: T, + count: Field, +} + +impl RepeatedValue { + fn print(self) { + for _i in 0 .. 
self.count { + println(self.value); + } + } +} + +fn main() { + let repeated = RepeatedValue { value: "Hello!", count: 2 }; + repeated.print(); +} +``` + +The `print` function will print `Hello!` an arbitrary number of times, twice in this case. + +If we want to be generic over array lengths (which are type-level integers), we can use numeric +generics. Using these looks just like using regular generics, but these generics can resolve to +integers at compile-time, rather than resolving to types. Here's an example of a struct that is +generic over the size of the array it contains internally: + +```rust +struct BigInt { + limbs: [u32; N], +} + +impl BigInt { + // `N` is in scope of all methods in the impl + fn first(first: BigInt, second: BigInt) -> Self { + assert(first.limbs != second.limbs); + first + } + fn second(first: BigInt, second: Self) -> Self { + assert(first.limbs != second.limbs); + second + } +} +``` + +## Calling functions on generic parameters + +Since a generic type `T` can represent any type, how can we call functions on the underlying type? +In other words, how can we go from "any type `T`" to "any type `T` that has certain methods available?" + +This is what [traits](../concepts/traits) are for in Noir.
Here's an example of a function generic over +any type `T` that implements the `Eq` trait for equality: + +```rust +fn first_element_is_equal(array1: [T; N], array2: [T; N]) -> bool + where T: Eq +{ + if (array1.len() == 0) | (array2.len() == 0) { + true + } else { + array1[0] == array2[0] + } +} + +fn main() { + assert(first_element_is_equal([1, 2, 3], [1, 5, 6])); + + // We can use first_element_is_equal for arrays of any type + // as long as we have an Eq impl for the types we pass in + let array = [MyStruct::new(), MyStruct::new()]; + assert(first_element_is_equal(array, array)); +} + +impl Eq for MyStruct { + fn eq(self, other: MyStruct) -> bool { + self.foo == other.foo + } +} +``` + +You can find more details on traits and trait implementations on the [traits page](../concepts/traits). diff --git a/docs/docs/noir/concepts/lambdas.md b/docs/docs/noir/concepts/lambdas.md new file mode 100644 index 00000000000..be3c7e0b5ca --- /dev/null +++ b/docs/docs/noir/concepts/lambdas.md @@ -0,0 +1,81 @@ +--- +title: Lambdas +description: Learn how to use anonymous functions in Noir programming language. +keywords: [Noir programming language, lambda, closure, function, anonymous function] +sidebar_position: 9 +--- + +## Introduction + +Lambdas are anonymous functions. The syntax is `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +A block can be used as the body of a lambda, allowing you to declare local variables inside it: + +```rust +let cool = || { + let x = 100; + let y = 100; + x + y +}; + +assert(cool() == 200); +``` + +## Closures + +Inside the body of a lambda, you can use variables defined in the enclosing function. Such lambdas are called **closures**.
In this example `x` is defined inside `main` and is accessed from within the lambda: + +```rust +fn main() { + let x = 100; + let closure = || x + 150; + assert(closure() == 250); +} +``` + +## Passing closures to higher-order functions + +It may catch you by surprise that the following code fails to compile: + +```rust +fn foo(f: fn () -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // error :( +} +``` + +The reason is that the closure's capture environment affects its type - we have a closure that captures two Fields and `foo` +expects a regular function as an argument - those are incompatible. +:::note + +Variables contained within the `||` are the closure's parameters, and the expression that follows it is the closure's body. The capture environment is comprised of any variables used in the closure's body that are not parameters. + +E.g. in |x| x + y, y would be a captured variable, but x would not be, since it is a parameter of the closure. + +::: +The syntax for the type of a closure is `fn[env](args) -> ret_type`, where `env` is the capture environment of the closure - +in this example that's `(Field, Field)`. + +The best solution in our case is to make `foo` generic over the environment type of its parameter, so that it can be called +with closures with any environment, as well as with regular functions: + +```rust +fn foo(f: fn[Env]() -> Field) -> Field { + f() +} + +fn main() { + let (x, y) = (50, 50); + assert(foo(|| x + y) == 100); // compiles fine + assert(foo(|| 60) == 60); // compiles fine +} +``` diff --git a/docs/docs/noir/concepts/mutability.md b/docs/docs/noir/concepts/mutability.md new file mode 100644 index 00000000000..9cc10429cb4 --- /dev/null +++ b/docs/docs/noir/concepts/mutability.md @@ -0,0 +1,93 @@ +--- +title: Mutability +description: + Learn about mutable variables, constants, and globals in Noir programming language. Discover how + to declare, modify, and use them in your programs. 
+keywords: [noir programming language, mutability in noir, mutable variables, constants, globals] +sidebar_position: 8 +--- + +Variables in noir can be declared mutable via the `mut` keyword. Mutable variables can be reassigned +to via an assignment expression. + +```rust +let x = 2; +x = 3; // error: x must be mutable to be assigned to + +let mut y = 3; +y = 4; // OK +``` + +The `mut` modifier can also apply to patterns: + +```rust +let (a, mut b) = (1, 2); +a = 11; // error: a must be mutable to be assigned to +b = 12; // OK + +let mut (c, d) = (3, 4); +c = 13; // OK +d = 14; // OK + +// etc. +let MyStruct { x: mut y } = MyStruct { x: a }; +// y is now in scope +``` + +Note that mutability in noir is local and everything is passed by value, so if a called function +mutates its parameters then the parent function will keep the old value of the parameters. + +```rust +fn main() -> pub Field { + let x = 3; + helper(x); + x // x is still 3 +} + +fn helper(mut x: Field) { + x = 4; +} +``` + +## Comptime Values + +:::warning + +The 'comptime' keyword was removed in version 0.10. The comptime keyword and syntax are currently still kept and parsed for backwards compatibility, but are now deprecated and will issue a warning when used. `comptime` has been removed because it is no longer needed for accessing arrays. + +::: + +## Globals + +Noir also supports global variables. However, they must be known at compile-time. The global type can also be inferred by the compiler entirely. Globals can also be used to specify array +annotations for function parameters and can be imported from submodules. + +```rust +global N: Field = 5; // Same as `global N = 5` + +fn main(x : Field, y : [Field; N]) { + let res = x * N; + + assert(res == y[0]); + + let res2 = x * my_submodule::N; + assert(res != res2); +} + +mod my_submodule { + use dep::std; + + global N: Field = 10; + + fn my_helper() -> Field { + let x = N; + x + } +} +``` + +## Why only local mutability?
+ +Witnesses in a proving system are immutable in nature. Noir aims to _closely_ mirror this setting +without applying additional overhead to the user. Modeling a mutable reference is not as +straightforward as on conventional architectures and would incur some possibly unexpected overhead. diff --git a/docs/docs/explanations/noir/ops.md b/docs/docs/noir/concepts/ops.md similarity index 97% rename from docs/docs/explanations/noir/ops.md rename to docs/docs/noir/concepts/ops.md index 977c8ba1203..60425cb8994 100644 --- a/docs/docs/explanations/noir/ops.md +++ b/docs/docs/noir/concepts/ops.md @@ -63,7 +63,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. ```rust diff --git a/docs/docs/noir/concepts/oracles.md b/docs/docs/noir/concepts/oracles.md new file mode 100644 index 00000000000..2e6a6818d48 --- /dev/null +++ b/docs/docs/noir/concepts/oracles.md @@ -0,0 +1,23 @@ +--- +title: Oracles +description: Dive into how Noir supports Oracles via RPC calls, and learn how to declare an Oracle in Noir with our comprehensive guide. +keywords: + - Noir + - Oracles + - RPC Calls + - Unconstrained Functions + - Programming + - Blockchain +sidebar_position: 6 +--- + +Noir has support for Oracles via RPC calls. This means Noir will make an RPC call and use the return value for proof generation. + +Since Oracles are not resolved by Noir, they are [`unconstrained` functions](./unconstrained.md) + +You can declare an Oracle through the `#[oracle()]` flag. 
Example: + +```rust +#[oracle(get_number_sequence)] +unconstrained fn get_number_sequence(_size: Field) -> [Field] {} +``` diff --git a/docs/docs/noir/concepts/shadowing.md b/docs/docs/noir/concepts/shadowing.md new file mode 100644 index 00000000000..5ce6130d201 --- /dev/null +++ b/docs/docs/noir/concepts/shadowing.md @@ -0,0 +1,44 @@ +--- +title: Shadowing +sidebar_position: 12 +--- + +Noir allows for inheriting variables' values and re-declaring them with the same name similar to Rust, known as shadowing. + +For example, the following function is valid in Noir: + +```rust +fn main() { + let x = 5; + + { + let x = x * 2; + assert (x == 10); + } + + assert (x == 5); +} +``` + +In this example, a variable x is first defined with the value 5. + +The local scope that follows shadows the original x, i.e. creates a local mutable x based on the value of the original x. It is given a value of 2 times the original x. + +When we return to the main scope, x once again refers to just the original x, which stays at the value of 5. + +## Temporal mutability + +One way that shadowing is useful, in addition to ergonomics across scopes, is for temporarily mutating variables. + +```rust +fn main() { + let age = 30; + // age = age + 5; // Would error as `age` is immutable by default. + + let mut age = age + 5; // Temporarily mutates `age` with a new value. + + let age = age; // Locks `age`'s mutability again. + + assert (age == 35); +} +``` diff --git a/docs/docs/noir/concepts/traits.md b/docs/docs/noir/concepts/traits.md new file mode 100644 index 00000000000..ef1445a5907 --- /dev/null +++ b/docs/docs/noir/concepts/traits.md @@ -0,0 +1,389 @@ +--- +title: Traits +description: + Traits in Noir can be used to abstract out a common interface for functions across + several data types. 
+keywords: [noir programming language, traits, interfaces, generic, protocol] +sidebar_position: 14 +--- + +## Overview + +Traits in Noir are a useful abstraction similar to interfaces or protocols in other languages. Each trait defines +the interface of several methods contained within the trait. Types can then implement this trait by providing +implementations for these methods. For example in the program: + +```rust +struct Rectangle { + width: Field, + height: Field, +} + +impl Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +fn log_area(r: Rectangle) { + println(r.area()); +} +``` + +We have a function `log_area` to log the area of a `Rectangle`. Now how should we change the program if we want this +function to work on `Triangle`s as well?: + +```rust +struct Triangle { + width: Field, + height: Field, +} + +impl Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Making `log_area` generic over all types `T` would be invalid since not all types have an `area` method. Instead, we can +introduce a new `Area` trait and make `log_area` generic over all types `T` that implement `Area`: + +```rust +trait Area { + fn area(self) -> Field; +} + +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +We also need to explicitly implement `Area` for `Rectangle` and `Triangle`. We can do that by changing their existing +impls slightly. Note that the parameter types and return type of each of our `area` methods must match those defined +by the `Area` trait. + +```rust +impl Area for Rectangle { + fn area(self) -> Field { + self.width * self.height + } +} + +impl Area for Triangle { + fn area(self) -> Field { + self.width * self.height / 2 + } +} +``` + +Now we have a working program that is generic over any type of Shape that is used! Others can even use this program +as a library with their own types - such as `Circle` - as long as they also implement `Area` for these types. 
+ +## Where Clauses + +As seen in `log_area` above, when we want to create a function or method that is generic over any type that implements +a trait, we can add a where clause to the generic function. + +```rust +fn log_area(shape: T) where T: Area { + println(shape.area()); +} +``` + +It is also possible to apply multiple trait constraints on the same variable at once by combining traits with the `+` +operator. Similarly, we can have multiple trait constraints by separating each with a comma: + +```rust +fn foo(elements: [T], thing: U) where + T: Default + Add + Eq, + U: Bar, +{ + let mut sum = T::default(); + + for element in elements { + sum += element; + } + + if sum == T::default() { + thing.bar(); + } +} +``` + +## Generic Implementations + +You can add generics to a trait implementation by adding the generic list after the `impl` keyword: + +```rust +trait Second { + fn second(self) -> Field; +} + +impl Second for (T, Field) { + fn second(self) -> Field { + self.1 + } +} +``` + +You can also implement a trait for every type this way: + +```rust +trait Debug { + fn debug(self); +} + +impl Debug for T { + fn debug(self) { + println(self); + } +} + +fn main() { + 1.debug(); +} +``` + +### Generic Trait Implementations With Where Clauses + +Where clauses can also be placed on trait implementations themselves to restrict generics in a similar way. +For example, while `impl Foo for T` implements the trait `Foo` for every type, `impl Foo for T where T: Bar` +will implement `Foo` only for types that also implement `Bar`. This is often used for implementing generic types. +For example, here is the implementation for array equality: + +```rust +impl Eq for [T; N] where T: Eq { + // Test if two arrays have the same elements. + // Because both arrays must have length N, we know their lengths already match. + fn eq(self, other: Self) -> bool { + let mut result = true; + + for i in 0 .. 
self.len() { + // The T: Eq constraint is needed to call == on the array elements here + result &= self[i] == other[i]; + } + + result + } +} +``` + +## Generic Traits + +Traits themselves can also be generic by placing the generic arguments after the trait name. These generics are in +scope of every item within the trait. + +```rust +trait Into { + // Convert `self` to type `T` + fn into(self) -> T; +} +``` + +When implementing generic traits the generic arguments of the trait must be specified. This is also true anytime +when referencing a generic trait (e.g. in a `where` clause). + +```rust +struct MyStruct { + array: [Field; 2], +} + +impl Into<[Field; 2]> for MyStruct { + fn into(self) -> [Field; 2] { + self.array + } +} + +fn as_array(x: T) -> [Field; 2] + where T: Into<[Field; 2]> +{ + x.into() +} + +fn main() { + let array = [1, 2]; + let my_struct = MyStruct { array }; + + assert_eq(as_array(my_struct), array); +} +``` + +## Trait Methods With No `self` + +A trait can contain any number of methods, each of which have access to the `Self` type which represents each type +that eventually implements the trait. Similarly, the `self` variable is available as well but is not required to be used. +For example, we can define a trait to create a default value for a type. This trait will need to return the `Self` type +but doesn't need to take any parameters: + +```rust +trait Default { + fn default() -> Self; +} +``` + +Implementing this trait can be done similarly to any other trait: + +```rust +impl Default for Field { + fn default() -> Field { + 0 + } +} + +struct MyType {} + +impl Default for MyType { + fn default() -> Self { + MyType {} + } +} +``` + +However, since there is no `self` parameter, we cannot call it via the method call syntax `object.method()`. +Instead, we'll need to refer to the function directly. This can be done either by referring to the +specific impl `MyType::default()` or referring to the trait itself `Default::default()`.
In the later +case, type inference determines the impl that is selected. + +```rust +let my_struct = MyStruct::default(); + +let x: Field = Default::default(); +let result = x + Default::default(); +``` + +:::warning + +```rust +let _ = Default::default(); +``` + +If type inference cannot select which impl to use because of an ambiguous `Self` type, an impl will be +arbitrarily selected. This occurs most often when the result of a trait function call with no parameters +is unused. To avoid this, when calling a trait function with no `self` or `Self` parameters or return type, +always refer to it via the implementation type's namespace - e.g. `MyType::default()`. +This is set to change to an error in future Noir versions. + +::: + +## Default Method Implementations + +A trait can also have default implementations of its methods by giving a body to the desired functions. +Note that this body must be valid for all types that may implement the trait. As a result, the only +valid operations on `self` will be operations valid for any type or other operations on the trait itself. + +```rust +trait Numeric { + fn add(self, other: Self) -> Self; + + // Default implementation of double is (self + self) + fn double(self) -> Self { + self.add(self) + } +} +``` + +When implementing a trait with default functions, a type may choose to implement only the required functions: + +```rust +impl Numeric for Field { + fn add(self, other: Field) -> Field { + self + other + } +} +``` + +Or it may implement the optional methods as well: + +```rust +impl Numeric for u32 { + fn add(self, other: u32) -> u32 { + self + other + } + + fn double(self) -> u32 { + self * 2 + } +} +``` + +## Impl Specialization + +When implementing traits for a generic type it is possible to implement the trait for only a certain combination +of generics. This can be either as an optimization or because those specific generics are required to implement the trait. 
+ +```rust +trait Sub { + fn sub(self, other: Self) -> Self; +} + +struct NonZero { + value: T, +} + +impl Sub for NonZero { + fn sub(self, other: Self) -> Self { + let value = self.value - other.value; + assert(value != 0); + NonZero { value } + } +} +``` + +## Overlapping Implementations + +Overlapping implementations are disallowed by Noir to ensure Noir's decision on which impl to select is never ambiguous. +This means if a trait `Foo` is already implemented +by a type `Bar` for all `T`, then we cannot also have a separate impl for `Bar` (or any other +type argument). Similarly, if there is an impl for all `T` such as `impl Debug for T`, we cannot create +any more impls to `Debug` for other types since it would be ambiguous which impl to choose for any given +method call. + +```rust +trait Trait {} + +// Previous impl defined here +impl Trait for (A, B) {} + +// error: Impl for type `(Field, Field)` overlaps with existing impl +impl Trait for (Field, Field) {} +``` + +## Trait Coherence + +Another restriction on trait implementations is coherence. This restriction ensures other crates cannot create +impls that may overlap with other impls, even if several unrelated crates are used as dependencies in the same +program. + +The coherence restriction is: to implement a trait, either the trait itself or the object type must be declared +in the crate the impl is in. + +In practice this often comes up when using types provided by libraries. If a library provides a type `Foo` that does +not implement a trait in the standard library such as `Default`, you may not `impl Default for Foo` in your own crate. +While restrictive, this prevents later issues or silent changes in the program if the `Foo` library later added its +own impl for `Default`. If you are a user of the `Foo` library in this scenario and need a trait not implemented by the +library your choices are to either submit a patch to the library or use the newtype pattern. 
+ +### The Newtype Pattern + +The newtype pattern gets around the coherence restriction by creating a new wrapper type around the library type +that we cannot create `impl`s for. Since the new wrapper type is defined in our current crate, we can create +impls for any trait we need on it. + +```rust +struct Wrapper { + foo: dep::some_library::Foo, +} + +impl Default for Wrapper { + fn default() -> Wrapper { + Wrapper { + foo: dep::some_library::Foo::new(), + } + } +} +``` + +Since we have an impl for our own type, the behavior of this code will not change even if `some_library` is updated +to provide its own `impl Default for Foo`. The downside of this pattern is that it requires extra wrapping and +unwrapping of values when converting to and from the `Wrapper` and `Foo` types. diff --git a/docs/docs/explanations/noir/unconstrained.md b/docs/docs/noir/concepts/unconstrained.md similarity index 98% rename from docs/docs/explanations/noir/unconstrained.md rename to docs/docs/noir/concepts/unconstrained.md index 7a61d3953ef..6b3424f7993 100644 --- a/docs/docs/explanations/noir/unconstrained.md +++ b/docs/docs/noir/concepts/unconstrained.md @@ -6,7 +6,7 @@ keywords: [Noir programming language, unconstrained, open] sidebar_position: 5 --- -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? 
diff --git a/docs/docs/explanations/modules_packages_crates/_category_.json b/docs/docs/noir/modules_packages_crates/_category_.json similarity index 100% rename from docs/docs/explanations/modules_packages_crates/_category_.json rename to docs/docs/noir/modules_packages_crates/_category_.json diff --git a/docs/docs/explanations/modules_packages_crates/crates_and_packages.md b/docs/docs/noir/modules_packages_crates/crates_and_packages.md similarity index 96% rename from docs/docs/explanations/modules_packages_crates/crates_and_packages.md rename to docs/docs/noir/modules_packages_crates/crates_and_packages.md index aae6795b229..760a463094c 100644 --- a/docs/docs/explanations/modules_packages_crates/crates_and_packages.md +++ b/docs/docs/noir/modules_packages_crates/crates_and_packages.md @@ -24,7 +24,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/docs/explanations/modules_packages_crates/dependencies.md b/docs/docs/noir/modules_packages_crates/dependencies.md similarity index 96% rename from docs/docs/explanations/modules_packages_crates/dependencies.md rename to docs/docs/noir/modules_packages_crates/dependencies.md index 57f0f9fd420..a37dc401b7d 100644 --- a/docs/docs/explanations/modules_packages_crates/dependencies.md +++ b/docs/docs/noir/modules_packages_crates/dependencies.md @@ -35,7 +35,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -49,7 +49,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -61,17 +61,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/docs/explanations/modules_packages_crates/modules.md b/docs/docs/noir/modules_packages_crates/modules.md similarity index 96% rename from docs/docs/explanations/modules_packages_crates/modules.md rename to docs/docs/noir/modules_packages_crates/modules.md index f9f15aee8be..ae822a1cff4 100644 --- a/docs/docs/explanations/modules_packages_crates/modules.md +++ b/docs/docs/noir/modules_packages_crates/modules.md @@ -11,7 +11,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. 
diff --git a/docs/docs/explanations/modules_packages_crates/workspaces.md b/docs/docs/noir/modules_packages_crates/workspaces.md similarity index 100% rename from docs/docs/explanations/modules_packages_crates/workspaces.md rename to docs/docs/noir/modules_packages_crates/workspaces.md diff --git a/docs/docs/explanations/standard_library/_category_.json b/docs/docs/noir/standard_library/_category_.json similarity index 100% rename from docs/docs/explanations/standard_library/_category_.json rename to docs/docs/noir/standard_library/_category_.json diff --git a/docs/docs/noir/standard_library/black_box_fns.md b/docs/docs/noir/standard_library/black_box_fns.md new file mode 100644 index 00000000000..4b1efbd17de --- /dev/null +++ b/docs/docs/noir/standard_library/black_box_fns.md @@ -0,0 +1,45 @@ +--- +title: Black Box Functions +description: Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. +keywords: [noir, black box functions] +--- + +Black box functions are functions in Noir that rely on backends implementing support for specialized constraints. This makes certain zk-snark unfriendly computations cheaper than if they were implemented in Noir. + +:::warning + +It is likely that not all backends will support a particular black box function. + +::: + +Because it is not guaranteed that all backends will support black box functions, it is possible that certain Noir programs won't compile against a particular backend if they use an unsupported black box function. It is possible to fallback to less efficient implementations written in Noir/ACIR in some cases. + +Black box functions are specified with the `#[foreign(black_box_fn)]` attribute. 
For example, the SHA256 function in the Noir [source code](https://github.com/noir-lang/noir/blob/v0.5.1/noir_stdlib/src/hash.nr) looks like: + +```rust +#[foreign(sha256)] +fn sha256(_input : [u8; N]) -> [u8; 32] {} +``` + +## Function list + +Here is a list of the current black box functions that are supported by UltraPlonk: + +- AES +- [SHA256](./cryptographic_primitives/hashes#sha256) +- [Schnorr signature verification](./cryptographic_primitives/schnorr) +- [Blake2s](./cryptographic_primitives/hashes#blake2s) +- [Pedersen Hash](./cryptographic_primitives/hashes#pedersen_hash) +- [Pedersen Commitment](./cryptographic_primitives/hashes#pedersen_commitment) +- [ECDSA signature verification](./cryptographic_primitives/ecdsa_sig_verification) +- [Fixed base scalar multiplication](./cryptographic_primitives/scalar) +- [Compute merkle root](./merkle_trees#compute_merkle_root) +- AND +- XOR +- RANGE +- [Keccak256](./cryptographic_primitives/hashes#keccak256) +- [Recursive proof verification](./recursion) + +Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. + +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). 
diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/_category_.json b/docs/docs/noir/standard_library/cryptographic_primitives/_category_.json similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/_category_.json rename to docs/docs/noir/standard_library/cryptographic_primitives/_category_.json diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/ec_primitives.md b/docs/docs/noir/standard_library/cryptographic_primitives/ec_primitives.md similarity index 98% rename from docs/docs/explanations/standard_library/cryptographic_primitives/ec_primitives.md rename to docs/docs/noir/standard_library/cryptographic_primitives/ec_primitives.md index 8d573adb3be..d2b42d67b7c 100644 --- a/docs/docs/explanations/standard_library/cryptographic_primitives/ec_primitives.md +++ b/docs/docs/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -72,7 +72,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. ## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. 
A couple of more interesting examples in Noir would be: diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx rename to docs/docs/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/eddsa.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/eddsa.mdx rename to docs/docs/noir/standard_library/cryptographic_primitives/eddsa.mdx diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx new file mode 100644 index 00000000000..3c5f7f79603 --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/hashes.mdx @@ -0,0 +1,167 @@ +--- +title: Hash methods +description: + Learn about the cryptographic primitives ready to use for any Noir project, including sha256, + blake2s, pedersen, mimc_bn254 and mimc +keywords: + [cryptographic primitives, Noir project, sha256, blake2s, pedersen, mimc_bn254, mimc, hash] +sidebar_position: 0 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## sha256 + +Given an array of bytes, returns the resulting sha256 hash. 
+ +```rust +fn sha256(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::sha256(x); +} +``` + + + +## blake2s + +Given an array of bytes, returns an array with the Blake2 hash + +```rust +fn blake2s(_input : [u8]) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::blake2s(x); +} +``` + + + +## pedersen_hash + +Given an array of Fields, returns the Pedersen hash. + +```rust +fn pedersen_hash(_input : [Field]) -> Field +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let hash = std::hash::pedersen_hash(x); +} +``` + + + + + +## pedersen_commitment + +Given an array of Fields, returns the Pedersen commitment. + +```rust +fn pedersen_commitment(_input : [Field]) -> [Field; 2] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let commitment = std::hash::pedersen_commitment(x); +} +``` + + + +## keccak256 + +Given an array of bytes (`u8`), returns the resulting keccak hash as an array of 32 bytes +(`[u8; 32]`). Specify a message_size to hash only the first `message_size` bytes +of the input. + +```rust +fn keccak256(_input : [u8; N], _message_size: u32) -> [u8; 32] +``` + +example: + +```rust +fn main() { + let x = [163, 117, 178, 149]; // some random bytes + let message_size = 4; + let hash = std::hash::keccak256(x, message_size); +} +``` + + + +## poseidon + +Given an array of Fields, returns a new Field with the Poseidon Hash. Mind that you need to specify +how many inputs are there to your Poseidon function. 
+
+```rust
+// example for hash_1, hash_2 accepts an array of length 2, etc
+fn hash_1(input: [Field; 1]) -> Field
+```
+
+example:
+
+```rust
+fn main()
+{
+  let hash_2 = std::hash::poseidon::bn254::hash_2([1, 2]);
+  assert(hash_2 == 0x115cc0f5e7d690413df64c6b9662e9cf2a3617f2743245519e19607a4417189a);
+}
+```
+
+## mimc_bn254 and mimc
+
+`mimc_bn254` is `mimc`, but with hardcoded parameters for the BN254 curve. You can use it by
+providing an array of Fields, and it returns a Field with the hash. You can use the `mimc` method if
+you're willing to input your own constants:
+
+```rust
+fn mimc(x: Field, k: Field, constants: [Field; N], exp : Field) -> Field
+```
+
+otherwise, use the `mimc_bn254` method:
+
+```rust
+fn mimc_bn254(array: [Field; N]) -> Field
+```
+
+example:
+
+```rust
+
+fn main() {
+  let x = [163, 117, 178, 149]; // some random bytes
+  let hash = std::hash::mimc::mimc_bn254(x);
+}
+```
+
+## hash_to_field
+
+```rust
+fn hash_to_field(_input : [Field; N]) -> Field {}
+```
+
+Calculates the `blake2s` hash of the inputs and returns the hash modulo the field modulus to return
+a value which can be represented as a `Field`.
+ diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/index.md b/docs/docs/noir/standard_library/cryptographic_primitives/index.md similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/index.md rename to docs/docs/noir/standard_library/cryptographic_primitives/index.md diff --git a/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx new file mode 100644 index 00000000000..aa4fb8cbaed --- /dev/null +++ b/docs/docs/noir/standard_library/cryptographic_primitives/scalar.mdx @@ -0,0 +1,28 @@ +--- +title: Scalar multiplication +description: See how you can perform scalar multiplications over a fixed base in Noir +keywords: [cryptographic primitives, Noir project, scalar multiplication] +sidebar_position: 1 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## scalar_mul::fixed_base_embedded_curve + +Performs scalar multiplication over the embedded curve whose coordinates are defined by the +configured noir field. For the BN254 scalar field, this is BabyJubJub or Grumpkin. 
+ +```rust +fn fixed_base_embedded_curve(_input : Field) -> [Field; 2] +``` + +example + +```rust +fn main(x : Field) { + let scal = std::scalar_mul::fixed_base_embedded_curve(x); + println(scal); +} +``` + + diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/schnorr.mdx b/docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/schnorr.mdx rename to docs/docs/noir/standard_library/cryptographic_primitives/schnorr.mdx diff --git a/docs/docs/noir/standard_library/logging.md b/docs/docs/noir/standard_library/logging.md new file mode 100644 index 00000000000..db75ef9f86f --- /dev/null +++ b/docs/docs/noir/standard_library/logging.md @@ -0,0 +1,78 @@ +--- +title: Logging +description: + Learn how to use the println statement for debugging in Noir with this tutorial. Understand the + basics of logging in Noir and how to implement it in your code. +keywords: + [ + noir logging, + println statement, + print statement, + debugging in noir, + noir std library, + logging tutorial, + basic logging in noir, + noir logging implementation, + noir debugging techniques, + rust, + ] +--- + +The standard library provides two familiar statements you can use: `println` and `print`. Despite being a limited implementation of rust's `println!` and `print!` macros, these constructs can be useful for debugging. + +You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). + +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). 
Neither `println`, nor `print` are callable for failed constraints caught at compile time. + +Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. For example: + +```rust +struct Person { + age: Field, + height: Field, +} + +fn main(age: Field, height: Field) { + let person = Person { + age: age, + height: height, + }; + println(person); + println(age + height); + println("Hello world!"); +} +``` + +You can print different types in the same statement (including strings) with a type called `fmtstr`. It can be specified in the same way as a normal string, just prepended with an "f" character: + +```rust + let fmt_str = f"i: {i}, j: {j}"; + println(fmt_str); + + let s = myStruct { y: x, x: y }; + println(s); + + println(f"i: {i}, s: {s}"); + + println(x); + println([x, y]); + + let foo = fooStruct { my_struct: s, foo: 15 }; + println(f"s: {s}, foo: {foo}"); + + println(15); // prints 0x0f, implicit Field + println(-1 as u8); // prints 255 + println(-1 as i8); // prints -1 +``` + +Examples shown above are interchangeable between the two `print` statements: + +```rust +let person = Person { age : age, height : height }; + +println(person); +print(person); + +println("Hello world!"); // Prints with a newline at the end of the input +print("Hello world!"); // Prints the input and keeps cursor on the same line +``` diff --git a/docs/docs/noir/standard_library/merkle_trees.md b/docs/docs/noir/standard_library/merkle_trees.md new file mode 100644 index 00000000000..fa488677884 --- /dev/null +++ b/docs/docs/noir/standard_library/merkle_trees.md @@ -0,0 +1,58 @@ +--- +title: Merkle Trees +description: Learn about Merkle Trees in Noir with this tutorial. Explore the basics of computing a merkle root using a proof, with examples. 
+keywords:
+  [
+    Merkle trees in Noir,
+    Noir programming language,
+    check membership,
+    computing root from leaf,
+    Noir Merkle tree implementation,
+    Merkle tree tutorial,
+    Merkle tree code examples,
+    Noir libraries,
+    pedersen hash,
+  ]
+---
+
+## compute_merkle_root
+
+Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash).
+
+```rust
+fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field
+```
+
+example:
+
+```rust
+/**
+    // these values are for this example only
+    index = "0"
+    priv_key = "0x000000000000000000000000000000000000000000000000000000616c696365"
+    secret = "0x1929ea3ab8d9106a899386883d9428f8256cfedb3c4f6b66bf4aa4d28a79988f"
+    note_hash_path = [
+    "0x1e61bdae0f027b1b2159e1f9d3f8d00fa668a952dddd822fda80dc745d6f65cc",
+    "0x0e4223f3925f98934393c74975142bd73079ab0621f4ee133cee050a3c194f1a",
+    "0x2fd7bb412155bf8693a3bd2a3e7581a679c95c68a052f835dddca85fa1569a40"
+    ]
+  */
+fn main(index: Field, priv_key: Field, secret: Field, note_hash_path: [Field; 3]) {
+
+    let pubkey = std::scalar_mul::fixed_base_embedded_curve(priv_key);
+    let pubkey_x = pubkey[0];
+    let pubkey_y = pubkey[1];
+    let note_commitment = std::hash::pedersen([pubkey_x, pubkey_y, secret]);
+
+    let root = std::merkle::compute_merkle_root(note_commitment[0], index, note_hash_path);
+    println(root);
+}
+```
+
+To check merkle tree membership:
+
+1. Include a merkle root as a program input.
+2. Compute the merkle root of a given leaf, index and hash path.
+3. Assert the merkle roots are equal.
+
+For more info about merkle trees, see the Wikipedia [page](https://en.wikipedia.org/wiki/Merkle_tree).
diff --git a/docs/docs/noir/standard_library/options.md b/docs/docs/noir/standard_library/options.md new file mode 100644 index 00000000000..970c9cfbf11 --- /dev/null +++ b/docs/docs/noir/standard_library/options.md @@ -0,0 +1,97 @@ +--- +title: Option Type +--- + +The `Option` type is a way to express that a value might be present (`Some(T)`) or absent (`None`). It's a safer way to handle potential absence of values, compared to using nulls in many other languages. + +```rust +struct Option { + None, + Some(T), +} +``` + +The `Option` type, already imported into your Noir program, can be used directly: + +```rust +fn main() { + let none = Option::none(); + let some = Option::some(3); +} +``` + +See [this test](https://github.com/noir-lang/noir/blob/5cbfb9c4a06c8865c98ff2b594464b037d821a5c/crates/nargo_cli/tests/test_data/option/src/main.nr) for a more comprehensive set of examples of each of the methods described below. + +## Methods + +### none + +Constructs a none value. + +### some + +Constructs a some wrapper around a given value. + +### is_none + +Returns true if the Option is None. + +### is_some + +Returns true if the Option is Some. + +### unwrap + +Asserts `self.is_some()` and returns the wrapped value. + +### unwrap_unchecked + +Returns the inner value without asserting `self.is_some()`. This method can be useful within an if condition when we already know that `option.is_some()`. If the option is None, there is no guarantee what value will be returned, only that it will be of type T for an `Option`. + +### unwrap_or + +Returns the wrapped value if `self.is_some()`. Otherwise, returns the given default value. + +### unwrap_or_else + +Returns the wrapped value if `self.is_some()`. Otherwise, calls the given function to return a default value. + +### map + +If self is `Some(x)`, this returns `Some(f(x))`. Otherwise, this returns `None`. + +### map_or + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns the given default value.
+ +### map_or_else + +If self is `Some(x)`, this returns `f(x)`. Otherwise, this returns `default()`. + +### and + +Returns None if self is None. Otherwise, this returns `other`. + +### and_then + +If self is None, this returns None. Otherwise, this calls the given function with the Some value contained within self, and returns the result of that call. In some languages this function is called `flat_map` or `bind`. + +### or + +If self is Some, return self. Otherwise, return `other`. + +### or_else + +If self is Some, return self. Otherwise, return `default()`. + +### xor + +If only one of the two Options is Some, return that option. Otherwise, if both options are Some or both are None, None is returned. + +### filter + +Returns `Some(x)` if self is `Some(x)` and `predicate(x)` is true. Otherwise, this returns `None`. + +### flatten + +Flattens an `Option>` into a `Option`. This returns `None` if the outer Option is None. Otherwise, this returns the inner Option. diff --git a/docs/docs/explanations/standard_library/recursion.md b/docs/docs/noir/standard_library/recursion.md similarity index 65% rename from docs/docs/explanations/standard_library/recursion.md rename to docs/docs/noir/standard_library/recursion.md index ff4c63acaa7..67962082a8f 100644 --- a/docs/docs/explanations/standard_library/recursion.md +++ b/docs/docs/noir/standard_library/recursion.md @@ -19,11 +19,7 @@ This is a black box function. Read [this section](./black_box_fns) to learn more ::: -## Aggregation Object - -The purpose of the input aggregation object is a little less clear though (and the output aggregation object that is returned from the `std::verify_proof` method). Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). 
The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points (thus completing the recursive verification). - -So for example in this circuit: +## Example usage ```rust use dep::std; @@ -37,17 +33,17 @@ fn main( proof_b : [Field; 94], ) -> pub [Field; 16] { let output_aggregation_object_a = std::verify_proof( - verification_key, - proof, - public_inputs, + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), key_hash, input_aggregation_object ); let output_aggregation_object = std::verify_proof( - verification_key, - proof_b, - public_inputs, + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), key_hash, output_aggregation_object_a ); @@ -60,8 +56,6 @@ fn main( } ``` -In this example we have a circuit, that generates proofs A and B, that is being verified in circuit C. Assuming that the proof being passed in is not already a recursive proof, the `input_aggregation_object` will be all zeros. It will then generate an `output_aggregation_object`. This blob of data then becomes the `input_aggregation_object` of the next recursive aggregation we wish to compute. We can see here as the same public inputs, verification key, and key hash are used that we are verifying two proofs generated from the same circuit in this single circuit. `std::verify_proof` returns a `[Field]` because the size of an aggregation object is proof system dependent--in barretenberg, aggregation objects are two G1 points, while in Halo2, the aggregation object is a list of G1 points that is log the circuit size. So for the final step we convert the slice into an array of size 16 because we are generating proofs using UltraPlonk. 
- ## Parameters ### `verification_key` diff --git a/docs/docs/noir/standard_library/traits.md b/docs/docs/noir/standard_library/traits.md new file mode 100644 index 00000000000..f2960ca5080 --- /dev/null +++ b/docs/docs/noir/standard_library/traits.md @@ -0,0 +1,284 @@ +--- +title: Traits +description: Noir's stdlib provides a few commonly used traits. +keywords: [traits, trait, interface, protocol, default, add, eq] +--- + +## `std::default` + +### `std::default::Default` + +```rust +trait Default { + fn default() -> Self; +} +``` + +Constructs a default value of a type. + +Implementations: +```rust +impl Default for Field { .. } + +impl Default for i8 { .. } +impl Default for i16 { .. } +impl Default for i32 { .. } +impl Default for i64 { .. } + +impl Default for u8 { .. } +impl Default for u16 { .. } +impl Default for u32 { .. } +impl Default for u64 { .. } + +impl Default for () { .. } +impl Default for bool { .. } + +impl Default for [T; N] + where T: Default { .. } + +impl Default for (A, B) + where A: Default, B: Default { .. } + +impl Default for (A, B, C) + where A: Default, B: Default, C: Default { .. } + +impl Default for (A, B, C, D) + where A: Default, B: Default, C: Default, D: Default { .. } + +impl Default for (A, B, C, D, E) + where A: Default, B: Default, C: Default, D: Default, E: Default { .. } +``` + +For primitive integer types, the return value of `default` is `0`. Container +types such as arrays are filled with default values of their element type. + +## `std::cmp` + +### `std::cmp::Eq` + +```rust +trait Eq { + fn eq(self, other: Self) -> bool; +} +``` +Returns `true` if `self` is equal to `other`. Implementing this trait on a type +allows the type to be used with `==` and `!=`. + +Implementations: +```rust +impl Eq for Field { .. } + +impl Eq for i8 { .. } +impl Eq for i16 { .. } +impl Eq for i32 { .. } +impl Eq for i64 { .. } + +impl Eq for u8 { .. } +impl Eq for u16 { .. } +impl Eq for u32 { .. } +impl Eq for u64 { .. 
} + +impl Eq for () { .. } +impl Eq for bool { .. } + +impl Eq for [T; N] + where T: Eq { .. } + +impl Eq for (A, B) + where A: Eq, B: Eq { .. } + +impl Eq for (A, B, C) + where A: Eq, B: Eq, C: Eq { .. } + +impl Eq for (A, B, C, D) + where A: Eq, B: Eq, C: Eq, D: Eq { .. } + +impl Eq for (A, B, C, D, E) + where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { .. } +``` + +### `std::cmp::Cmp` + +```rust +trait Cmp { + fn cmp(self, other: Self) -> Ordering; +} +``` + +`a.cmp(b)` compares two values returning `Ordering::less()` if `a < b`, +`Ordering::equal()` if `a == b`, or `Ordering::greater()` if `a > b`. +Implementing this trait on a type allows `<`, `<=`, `>`, and `>=` to be +used on values of the type. + +Implementations: + +```rust +impl Ord for u8 { .. } +impl Ord for u16 { .. } +impl Ord for u32 { .. } +impl Ord for u64 { .. } + +impl Ord for i8 { .. } +impl Ord for i16 { .. } +impl Ord for i32 { .. } + +impl Ord for i64 { .. } + +impl Ord for () { .. } +impl Ord for bool { .. } + +impl Ord for [T; N] + where T: Ord { .. } + +impl Ord for (A, B) + where A: Ord, B: Ord { .. } + +impl Ord for (A, B, C) + where A: Ord, B: Ord, C: Ord { .. } + +impl Ord for (A, B, C, D) + where A: Ord, B: Ord, C: Ord, D: Ord { .. } + +impl Ord for (A, B, C, D, E) + where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { .. } +``` + +## `std::ops` + +### `std::ops::Add`, `std::ops::Sub`, `std::ops::Mul`, and `std::ops::Div` + +These traits abstract over addition, subtraction, multiplication, and division respectively. +Implementing these traits for a given type will also allow that type to be used with the corresponding operator +for that trait (`+` for Add, etc) in addition to the normal method names. 
+ +```rust +trait Add { + fn add(self, other: Self) -> Self; +} + +trait Sub { + fn sub(self, other: Self) -> Self; +} + +trait Mul { + fn mul(self, other: Self) -> Self; +} + +trait Div { + fn div(self, other: Self) -> Self; +} +``` + +The implementations block below is given for the `Add` trait, but the same types that implement +`Add` also implement `Sub`, `Mul`, and `Div`. + +Implementations: +```rust +impl Add for Field { .. } + +impl Add for i8 { .. } +impl Add for i16 { .. } +impl Add for i32 { .. } +impl Add for i64 { .. } + +impl Add for u8 { .. } +impl Add for u16 { .. } +impl Add for u32 { .. } +impl Add for u64 { .. } +``` + +### `std::ops::Rem` + +```rust +trait Rem { + fn rem(self, other: Self) -> Self; +} +``` + +`Rem::rem(a, b)` is the remainder function returning the result of what is +left after dividing `a` and `b`. Implementing `Rem` allows the `%` operator +to be used with the implementation type. + +Unlike other numeric traits, `Rem` is not implemented for `Field`. + +Implementations: +```rust +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } +``` + +### `std::ops::{ BitOr, BitAnd, BitXor }` + +```rust +trait BitOr { + fn bitor(self, other: Self) -> Self; +} + +trait BitAnd { + fn bitand(self, other: Self) -> Self; +} + +trait BitXor { + fn bitxor(self, other: Self) -> Self; +} +``` + +Traits for the bitwise operations `|`, `&`, and `^`. 
+ +Implementing `BitOr`, `BitAnd` or `BitXor` for a type allows the `|`, `&`, or `^` operator respectively +to be used with the type. + +The implementations block below is given for the `BitOr` trait, but the same types that implement +`BitOr` also implement `BitAnd` and `BitXor`. + +Implementations: +```rust +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } +``` + +### `std::ops::{ Shl, Shr }` + +```rust +trait Shl { + fn shl(self, other: Self) -> Self; +} + +trait Shr { + fn shr(self, other: Self) -> Self; +} +``` + +Traits for a bit shift left and bit shift right. + +Implementing `Shl` for a type allows the left shift operator (`<<`) to be used with the implementation type. +Similarly, implementing `Shr` allows the right shift operator (`>>`) to be used with the type. + +Note that bit shifting is not currently implemented for signed types. + +The implementations block below is given for the `Shl` trait, but the same types that implement +`Shl` also implement `Shr`. 
+ +Implementations: +```rust +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } +``` diff --git a/docs/docs/explanations/standard_library/zeroed.md b/docs/docs/noir/standard_library/zeroed.md similarity index 100% rename from docs/docs/explanations/standard_library/zeroed.md rename to docs/docs/noir/standard_library/zeroed.md diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/index.md b/docs/docs/reference/NoirJS/backend_barretenberg/index.md index bfbecb52864..e32501acb71 100644 --- a/docs/docs/reference/NoirJS/backend_barretenberg/index.md +++ b/docs/docs/reference/NoirJS/backend_barretenberg/index.md @@ -24,21 +24,22 @@ ## Functions -### flattenPublicInputs() +### publicInputsToWitnessMap() ```ts -flattenPublicInputs(publicInputs): string[] +publicInputsToWitnessMap(publicInputs, abi): WitnessMap ``` #### Parameters | Parameter | Type | | :------ | :------ | -| `publicInputs` | `WitnessMap` | +| `publicInputs` | `string`[] | +| `abi` | `Abi` | #### Returns -`string`[] +`WitnessMap` *** diff --git a/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md index 3eb360a78f1..05cebbc4e94 100644 --- a/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md +++ b/docs/docs/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md @@ -13,7 +13,7 @@ The representation of a proof | Member | Type | Description | | :------ | :------ | :------ | | `proof` | `Uint8Array` | **Description**

An byte array representing the proof | -| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | +| `publicInputs` | `string`[] | **Description**

Public inputs of a proof | *** diff --git a/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md index 0ba5783f0d5..5e3cd53e9d3 100644 --- a/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md +++ b/docs/docs/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md @@ -8,7 +8,6 @@ ecdsa_secp256k1_verify( signature): boolean ``` -Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. Verifies a ECDSA signature over the secp256k1 curve. ## Parameters diff --git a/docs/docs/reference/NoirJS/noir_js/index.md b/docs/docs/reference/NoirJS/noir_js/index.md index 8b9e35bc9a1..d600e21b299 100644 --- a/docs/docs/reference/NoirJS/noir_js/index.md +++ b/docs/docs/reference/NoirJS/noir_js/index.md @@ -26,7 +26,7 @@ | :------ | :------ | | [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | | [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | -| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Verifies a ECDSA signature over the secp256k1 curve. | | [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. 
| | [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | | [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | diff --git a/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md b/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md index 3eb360a78f1..05cebbc4e94 100644 --- a/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md +++ b/docs/docs/reference/NoirJS/noir_js/type-aliases/ProofData.md @@ -13,7 +13,7 @@ The representation of a proof | Member | Type | Description | | :------ | :------ | :------ | | `proof` | `Uint8Array` | **Description**

An byte array representing the proof | -| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | +| `publicInputs` | `string`[] | **Description**

Public inputs of a proof | *** diff --git a/docs/docs/reference/nargo_commands.md b/docs/docs/reference/nargo_commands.md index 239e88d9691..fc2671b2bfc 100644 --- a/docs/docs/reference/nargo_commands.md +++ b/docs/docs/reference/nargo_commands.md @@ -162,6 +162,7 @@ Runs the Noir program and prints its return value. | `--print-acir` | Display the ACIR for compiled circuit | | `--deny-warnings` | Treat all warnings as errors | | `--silence-warnings` | Suppress warnings | +| `--oracle-resolver` | JSON RPC url to solve oracle calls | | `-h, --help` | Print help | _Usage_ @@ -188,6 +189,7 @@ Creates a proof for the program. | `--print-acir` | Display the ACIR for compiled circuit | | `--deny-warnings` | Treat all warnings as errors | | `--silence-warnings` | Suppress warnings | +| `--oracle-resolver` | JSON RPC url to solve oracle calls | | `-h, --help` | Print help | ## `nargo verify` @@ -213,7 +215,7 @@ you run `nargo test`. To print `println` statements in tests, use the `--show-ou Takes an optional `--exact` flag which allows you to select tests based on an exact name. -See an example on the [testing page](@site/docs/getting_started/tooling/testing.md). +See an example on the [testing page](../getting_started/tooling/testing.md). ### Options @@ -226,6 +228,7 @@ See an example on the [testing page](@site/docs/getting_started/tooling/testing. 
| `--print-acir` | Display the ACIR for compiled circuit | | `--deny-warnings` | Treat all warnings as errors | | `--silence-warnings` | Suppress warnings | +| `--oracle-resolver` | JSON RPC url to solve oracle calls | | `-h, --help` | Print help | ## `nargo info` diff --git a/docs/docs/tutorials/noirjs_app.md b/docs/docs/tutorials/noirjs_app.md index e0f674fa09c..9f83def914b 100644 --- a/docs/docs/tutorials/noirjs_app.md +++ b/docs/docs/tutorials/noirjs_app.md @@ -1,37 +1,63 @@ --- -title: Tiny NoirJS app -description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment -keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs] +title: Building a web app with NoirJS +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment. +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs, app] sidebar_position: 0 --- -NoirJS works both on the browser and on the server, and works for both ESM and CJS module systems. In this page, we will learn how can we write a simple test and a simple web app to verify the standard Noir example. +NoirJS is a set of packages meant to work both in a browser and a server environment. In this tutorial, we will build a simple web app using them. From here, you should get an idea on how to proceed with your own Noir projects! You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). -## Before we start +## Setup :::note -Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.18.x matches `noir_js@0.18.x`, etc. +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. 
For example, Nargo 0.19.x matches `noir_js@0.19.x`, etc. -In this guide, we will be pinned to 0.17.0. +In this guide, we will be pinned to 0.19.4. ::: -Make sure you have Node installed on your machine by opening a terminal and executing `node --version`. If you don't see a version, you should install [node](https://github.com/nvm-sh/nvm). You can also use `yarn` if you prefer that package manager over npm (which comes with node). +Before we start, we want to make sure we have Node and Nargo installed. -First of all, follow the the [Nargo guide](@site/docs/getting_started/installation/index.md) to install nargo version 0.17.0 and create a new project with `nargo new circuit`. Once there, `cd` into the `circuit` folder. You should then be able to compile your circuit into `json` format and see it inside the `target` folder: +We start by opening a terminal and executing `node --version`. If we don't get an output like `v20.10.0`, that means node is not installed. Let's do that by following the handy [nvm guide](https://github.com/nvm-sh/nvm?tab=readme-ov-file#install--update-script). -```bash -nargo compile +As for `Nargo`, we can follow the [Nargo guide](../getting_started/installation/index.md) to install it. If you're lazy, just paste this on a terminal and run `noirup`: + +```sh +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash ``` -Your folder structure should look like: +Easy enough. Onwards! + +## Our project + +ZK is a powerful technology. An app that doesn't reveal one of the inputs to *anyone* is almost unbelievable, yet Noir makes it as easy as a single line of code. + +In fact, it's so simple that it comes nicely packaged in `nargo`. Let's do that! + +### Nargo + +Run: + +```nargo new circuit``` + +And... That's about it. Your program is ready to be compiled and run. 
+ +To compile, let's `cd` into the `circuit` folder to enter our project, and call: + +```nargo compile``` + +This compiles our circuit into `json` format and adds it to a new `target` folder. + +:::info + +At this point in the tutorial, your folder structure should look like this: ```tree . -└── circuit +└── circuit <---- our working directory ├── Nargo.toml ├── src │ └── main.nr @@ -39,84 +65,46 @@ Your folder structure should look like: └── circuit.json ``` -## Starting a new project +::: -Go back to the previous folder and start a new project by running run `npm init`. You can configure your project or just leave the defaults, and see a `package.json` appear in your root folder. +### Node and Vite -## Installing dependencies +If you want to explore Nargo, feel free to go on a side-quest now and follow the steps in the +[getting started](../getting_started/create_a_project) guide. However, we want our app to run on the browser, so we need Vite. -We'll need two `npm` packages. These packages will provide us the methods we need to run and verify proofs: +Vite is a powerful tool to generate static websites. While it provides all kinds of features, let's just go barebones with some good old vanilla JS. -```bash -npm i @noir-lang/backend_barretenberg@^0.17.0 @noir-lang/noir_js@^0.17.0 -``` +To do this, go back to the previous folder (`cd ..`) and create a new vite project by running `npm create vite` and choosing "Vanilla" and "Javascript". -To serve our page, we can use a build tool such as `vite`. Because we're gonna use some `wasm` files, we need to install a plugin as well. Run: +You should see `vite-project` appear in your root folder. This seems like a good time to `cd` into it and install our NoirJS packages: ```bash -npm i --save-dev vite rollup-plugin-copy +npm i @noir-lang/backend_barretenberg@0.19.4 @noir-lang/noir_js@0.19.4 ``` -Since we're on the dependency world, we may as well define a nice starting script. Vite makes it easy. 
Just open `package.json`, find the block "scripts" and add this just below the line with `"test" : "echo......."`: - -```json - "start": "vite --open" -``` +:::info -If you want do build a static website, you can also add some build and preview scripts: +At this point in the tutorial, your folder structure should look like this: -```json - "build": "vite build", - "preview": "vite preview" +```tree +. +└── circuit + └── ...etc... +└── vite-project <---- our working directory + └── ...etc... ``` -## Vite plugins +::: -Vite is great, but support from `wasm` doesn't work out-of-the-box. We're gonna write a quick plugin and use another one. Just copy and paste this into a file named `vite.config.js`. You don't need to understand it, just trust me bro. +#### Some cleanup -```js -import { defineConfig } from 'vite'; -import copy from 'rollup-plugin-copy'; -import fs from 'fs'; -import path from 'path'; - -const wasmContentTypePlugin = { - name: 'wasm-content-type-plugin', - configureServer(server) { - server.middlewares.use(async (req, res, next) => { - if (req.url.endsWith('.wasm')) { - res.setHeader('Content-Type', 'application/wasm'); - const newPath = req.url.replace('deps', 'dist'); - const targetPath = path.join(__dirname, newPath); - const wasmContent = fs.readFileSync(targetPath); - return res.end(wasmContent); - } - next(); - }); - }, -}; - -export default defineConfig(({ command }) => { - if (command === 'serve') { - return { - plugins: [ - copy({ - targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], - copySync: true, - hook: 'buildStart', - }), - command === 'serve' ? wasmContentTypePlugin : [], - ], - }; - } +`npx create vite` is amazing but it creates a bunch of files we don't really need for our simple example. Actually, let's just delete everything except for `index.html`, `main.js` and `package.json`. I feel lighter already. 
- return {}; -}); -``` +![my heart is ready for you, noir.js](../../static/img/memes/titanic.jpeg) ## HTML -Here's the simplest HTML with some terrible UI. Create a file called `index.html` and paste this: +Our app won't run like this, of course. We need some working HTML, at least. Let's open our broken-hearted `index.html` and replace everything with this code snippet: ```html @@ -136,8 +124,12 @@ Here's the simplest HTML with some terrible UI. Create a file called `index.html - -

Very basic Noir app

+ +

Noir app

+
+ + +

Logs

Proof

@@ -146,14 +138,25 @@ Here's the simplest HTML with some terrible UI. Create a file called `index.html ``` +It *could* be a beautiful UI... Depending on which universe you live in. + ## Some good old vanilla Javascript -Create a new file `app.js`, which is where our javascript code will live. Let's start with this code inside: +Our love for Noir needs undivided attention, so let's just open `main.js` and delete everything (this is where the romantic scenery becomes a bit creepy). + +Start by pasting in this boilerplate code: ```js -document.addEventListener('DOMContentLoaded', async () => { - // here's where the magic happens -}); +const setup = async () => { + await Promise.all([ + import("@noir-lang/noirc_abi").then(module => + module.default(new URL("@noir-lang/noirc_abi/web/noirc_abi_wasm_bg.wasm", import.meta.url).toString()) + ), + import("@noir-lang/acvm_js").then(module => + module.default(new URL("@noir-lang/acvm_js/web/acvm_js_bg.wasm", import.meta.url).toString()) + ) + ]); +} function display(container, msg) { const c = document.getElementById(container); @@ -161,73 +164,83 @@ function display(container, msg) { p.textContent = msg; c.appendChild(p); } + +document.getElementById('submitGuess').addEventListener('click', async () => { + try { + // here's where love happens + } catch(err) { + display("logs", "Oh 💔 Wrong guess") + } +}); + ``` -We can manipulate our website with this little function, so we can see our website working. +The display function doesn't do much. We're simply manipulating our website to see stuff happening. For example, if the proof fails, it will simply log a broken heart 😢 + +As for the `setup` function, it's just a sad reminder that dealing with `wasm` on the browser is not as easy as it should. Just copy, paste, and forget. 
-## Adding Noir +:::info -If you come from the previous page, your folder structure should look like this: +At this point in the tutorial, your folder structure should look like this: ```tree -├── app.js -├── circuit -│ ├── Nargo.toml -│ ├── src -│ │ └── main.nr -│ └── target -│ └── circuit.json -├── index.html -├── package.json -└── vite.config.js +. +└── circuit + └── ...same as above +└── vite-project + ├── main.js + ├── package.json + └── index.html ``` -You'll see other files and folders showing up (like `package-lock.json`, `yarn.lock`, `node_modules`) but you shouldn't have to care about those. +You'll see other files and folders showing up (like `package-lock.json`, `node_modules`) but you shouldn't have to care about those. -## Importing our dependencies +::: + +## Some NoirJS -We're starting with the good stuff now. At the top of the new javascript file, import the packages: +We're starting with the good stuff now. If you've compiled the circuit as described above, you should have a `json` file we want to import at the very top of our `main.js` file: ```ts +import circuit from '../circuit/target/circuit.json'; +``` + +[Noir is backend-agnostic](../index.md#whats-new-about-noir). We write Noir, but we also need a proving backend. That's why we need to import and instantiate the two dependencies we installed above: `BarretenbergBackend` and `Noir`. Let's import them right below: + +```js import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; import { Noir } from '@noir-lang/noir_js'; ``` -We also need to import the `circuit` JSON file we created. 
If you have the suggested folder structure, you can add this line: +And instantiate them inside our try-catch block: ```ts -import circuit from './circuit/target/circuit.json'; +// try { +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +// } ``` -## Write code - :::note -We're gonna be adding code inside the `document.addEventListener...etc` block: - -```js -// forget stuff here -document.addEventListener('DOMContentLoaded', async () => { - // here's where the magic happens -}); -// forget stuff here -``` +For the remainder of the tutorial, everything will be happening inside the `try` block ::: -Our dependencies exported two classes: `BarretenbergBackend` and `Noir`. Let's `init` them and add some logs, just to flex: +## Our app -```ts -const backend = new BarretenbergBackend(circuit); -const noir = new Noir(circuit, backend); -``` +Now for the app itself. We're capturing whatever is in the input when people press the submit button. Just add this: -## Proving +```js +const x = parseInt(document.getElementById('guessInput').value); +const input = { x, y: 2 }; +``` Now we're ready to prove stuff! Let's feed some inputs to our circuit and calculate the proof: ```js -const input = { x: 1, y: 2 }; +await setup(); // let's squeeze our wasm inits here + display('logs', 'Generating proof... ⌛'); const proof = await noir.generateFinalProof(input); display('logs', 'Generating proof... ✅'); @@ -236,13 +249,17 @@ display('results', proof.proof); You're probably eager to see stuff happening, so go and run your app now! -From your terminal, run `npm start` (or `yarn start`). If it doesn't open a browser for you, just visit `localhost:5173`. On a modern laptop, proof will generate in less than 100ms, and you'll see this: +From your terminal, run `npm run dev`. If it doesn't open a browser for you, just visit `localhost:5173`. You should now see the worst UI ever, with an ugly input. 
![Getting Started 0](@site/static/img/noir_getting_started_1.png) -If you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human. +Now, our circuit says `fn main(x: Field, y: pub Field)`. This means only the `y` value is public, and it's hardcoded above: `input = { x, y: 2 }`. In other words, you won't need to send your secret`x` to the verifier! -In any case, this means your proof was generated! But you shouldn't trust me just yet. Add these lines to see it being verified: +By inputting any number other than 2 in the input box and clicking "submit", you should get a valid proof. Otherwise the proof won't even generate correctly. By the way, if you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human ❤️. + +## Verifying + +Time to celebrate, yes! But we shouldn't trust machines so blindly. Let's add these lines to see our proof being verified: ```js display('logs', 'Verifying proof... ⌛'); @@ -250,12 +267,12 @@ const verification = await noir.verifyFinalProof(proof); if (verification) display('logs', 'Verifying proof... ✅'); ``` -By saving, your app will refresh and here's our complete Tiny Noir App! +You have successfully generated a client-side Noir web app! -You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). +![coded app without math knowledge](../../static/img/memes/flextape.jpeg) ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). 
The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/docusaurus.config.ts b/docs/docusaurus.config.ts index 98ef1fd680c..aacc318f5be 100644 --- a/docs/docusaurus.config.ts +++ b/docs/docusaurus.config.ts @@ -1,3 +1,4 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ import type { Config } from '@docusaurus/types'; const { themes } = require('prism-react-renderer'); @@ -40,13 +41,16 @@ export default { }, blog: false, theme: { - customCss: './src/css/custom.css', + customCss: ['./src/css/custom.css', './src/css/sidebar.css'], }, }, ], ], themeConfig: { + colorMode: { + respectPrefersColorScheme: true, + }, navbar: { logo: { alt: 'Noir Logo', @@ -112,7 +116,7 @@ export default { prism: { theme: lightTheme, darkTheme: darkTheme, - additionalLanguages: ['rust', 'powershell', 'solidity', 'toml'], + additionalLanguages: ['rust', 'powershell', 'solidity', 'toml', 'json', 'bash', 'docker'], }, stylesheets: [ { diff --git a/docs/link-check.config.json b/docs/link-check.config.json new file mode 100644 index 00000000000..68059476958 --- /dev/null +++ b/docs/link-check.config.json @@ -0,0 +1,7 @@ +{ + "ignorePatterns": [ + { + "pattern": "^[^\/]+\/[^\/].*$|^\/[^\/].*$" + } + ] +} diff --git a/docs/package.json b/docs/package.json index 86f15b0a311..1e3efcfe3d1 100644 --- a/docs/package.json +++ b/docs/package.json @@ -5,7 +5,7 @@ "scripts": { "start": "docusaurus start", "build": "yarn version::stables && docusaurus build", - "version::stables": "node --loader ts-node/esm ./scripts/setStable.ts", + "version::stables": "ts-node ./scripts/setStable.ts", "serve": "serve build" }, "dependencies": { @@ -16,7 +16,7 @@ "@noir-lang/noir_js": "workspace:*", "@noir-lang/noirc_abi": "workspace:*", 
"@noir-lang/types": "workspace:*", - "@signorecello/noir_playground": "^0.6.0", + "@signorecello/noir_playground": "^0.7.0", "axios": "^1.4.0", "clsx": "^1.2.1", "hast-util-is-element": "^1.1.0", diff --git a/docs/scripts/setStable.ts b/docs/scripts/setStable.ts index e23d990763a..0f86c4afd59 100644 --- a/docs/scripts/setStable.ts +++ b/docs/scripts/setStable.ts @@ -1,13 +1,13 @@ +/* eslint-disable @typescript-eslint/no-var-requires */ const fs = require('fs'); const path = require('path'); const axios = require('axios'); +const GITHUB_PAGES = 3; const IGNORE_VERSIONS = ['0.16.0']; -const NUMBER_OF_VERSIONS_TO_SHOW = 4; +const NUMBER_OF_VERSIONS_TO_SHOW = 2; async function main() { - const versionsFile = path.resolve('../versions.json'); - const axiosOpts = { params: { per_page: 100 }, headers: {}, @@ -15,24 +15,41 @@ async function main() { if (process.env.GITHUB_TOKEN) axiosOpts.headers = { Authorization: `token ${process.env.GITHUB_TOKEN}` }; - const { data } = await axios.get('https://api.github.com/repos/noir-lang/noir/releases', axiosOpts); - - const all = data.map((release) => release.tag_name); - console.log('All versions: ', all); - const aztecs = data.filter((release) => release.tag_name.includes('aztec')).map((release) => release.tag_name); - console.log('Removing aztecs: ', aztecs); - const prereleases = data.filter((release) => !release.prerelease).map((release) => release.tag_name); - console.log('Removing prereleases: ', prereleases); - - const stables = data - .filter((release) => !release.prerelease && !release.tag_name.includes('aztec')) - .filter((release) => !IGNORE_VERSIONS.includes(release.tag_name.replace('v', ''))) - .map((release) => release.tag_name) - .slice(0, NUMBER_OF_VERSIONS_TO_SHOW); - - console.log('Stables: ', stables); - - fs.writeFileSync(versionsFile, JSON.stringify(stables, null, 2)); + let stables = []; + console.log('Retrieved versions:'); + + for (let i = 0; i < GITHUB_PAGES; i++) { + const { data } = await 
axios.get(`https://api.github.com/repos/noir-lang/noir/releases?page=${i + 1}`, axiosOpts); + + console.log(data.map((release) => release.tag_name)); + stables.push( + ...data + .filter( + (release) => + !release.prerelease && !release.tag_name.includes('aztec') && !release.tag_name.includes('aztec'), + ) + .filter((release) => !IGNORE_VERSIONS.includes(release.tag_name.replace('v', ''))) + .map((release) => release.tag_name), + ); + } + + stables = stables.slice(0, NUMBER_OF_VERSIONS_TO_SHOW); + + console.log('Filtered down to stables: ', stables); + + const onlyLatestPatches = []; + const minorsSet = new Set(stables.map((el) => el.split('.')[1])); + for (const minor of minorsSet) { + const minorVersions = stables.filter((el) => el.split('.')[1] === minor); + const max = minorVersions.reduce((prev, current) => { + return prev > current ? prev : current; + }); + onlyLatestPatches.push(max); + } + + console.log('Only latest patches: ', onlyLatestPatches); + + fs.writeFileSync(path.resolve(__dirname, '../versions.json'), JSON.stringify(onlyLatestPatches, null, 2)); } main(); diff --git a/docs/sidebars.js b/docs/sidebars.js index 016ead14a8f..f1e79ba9ebc 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -21,10 +21,15 @@ export default { items: [ { type: 'autogenerated', - dirName: 'explanations', + dirName: 'noir', }, ], }, + { + type: 'html', + value: '
', + defaultStyle: true, + }, { type: 'category', label: 'How To Guides', @@ -35,6 +40,16 @@ export default { }, ], }, + { + type: 'category', + label: 'Explainers', + items: [ + { + type: 'autogenerated', + dirName: 'explainers', + }, + ], + }, { type: 'category', label: 'Tutorials', @@ -50,6 +65,11 @@ export default { label: 'Reference', items: [{ type: 'autogenerated', dirName: 'reference' }], }, + { + type: 'html', + value: '
', + defaultStyle: true, + }, { type: 'doc', id: 'migration_notes', diff --git a/docs/src/css/custom.css b/docs/src/css/custom.css index 53dec741513..c96e9df9832 100644 --- a/docs/src/css/custom.css +++ b/docs/src/css/custom.css @@ -6,45 +6,47 @@ /* You can override the default Infima variables here. */ :root { - --ifm-color-primary: #9f3fff; - --ifm-color-primary-dark: #2f1f49; - --ifm-color-primary-darker: #2f1f49; - --ifm-color-primary-darkest: #2f1f49; - --ifm-color-primary-light: #9f3fff; - --ifm-color-primary-lighter: #9f3fff; - --ifm-color-primary-lightest: #9f3fff; + --ifm-color-primary: #514167; + --ifm-color-primary-dark: #493a5d; + --ifm-color-primary-darker: #453758; + --ifm-color-primary-darkest: #392d48; + --ifm-color-primary-light: #594871; + --ifm-color-primary-lighter: #5d4b76; + --ifm-color-primary-lightest: #695486; --search-local-highlight-color: #2f1f49; --ifm-menu-color-background-active: #f6f8fa; --ifm-code-font-size: 95%; - --ifm-breadcrumb-color-active: white; - --ifm-breadcrumb-item-background-active: #2f1f49; + --ifm-breadcrumb-color-active: #F6FBFC; + --ifm-breadcrumb-item-background-active: #2f1f49; --ifm-heading-color: #2f1f49; --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.1); + --ifm-link-color: #B68BE4; } /* For readability concerns, you should choose a lighter palette in dark mode. 
*/ [data-theme='dark'] { - --ifm-color-primary: #f5bda9; - --ifm-color-primary-dark: #f5bda9; - --ifm-color-primary-darker: #f5bda9; - --ifm-color-primary-darkest: #f5bda9; - --ifm-color-primary-light: #f5bda9; - --ifm-color-primary-lighter: #f5bda9; - --ifm-color-primary-lightest: #f5bda9; - - --ifm-heading-color: white; + --ifm-color-primary: #fbc0b4; + --ifm-color-primary-dark: #f99e8b; + --ifm-color-primary-darker: #f88c77; + --ifm-color-primary-darkest: #f45939; + --ifm-color-primary-light: #fde2dd; + --ifm-color-primary-lighter: #fef4f1; + --ifm-color-primary-lightest: #ffffff; + + --ifm-heading-color: #F6FBFC; --ifm-menu-color-background-active: #282a36; --ifm-breadcrumb-color-active: #2f1f49; --ifm-breadcrumb-item-background-active: #f5bda9; --docusaurus-highlighted-code-line-bg: rgba(0, 0, 0, 0.3); + --ifm-link-color: var(--ifm-color-primary); } html[data-theme='dark'] { --search-local-highlight-color: #f5bda9; - --search-local-muted-color: white; + --search-local-muted-color: #F6FBFC; --search-local-hit-active-color: #1b1b1d; - --search-local-hit-color: white; + --search-local-hit-color: #F6FBFC; } [data-theme='dark'] .footer { @@ -65,6 +67,13 @@ html[data-theme='dark'] { --ifm-footer-title-color: #2f1f49; } +[data-theme='light'] #__docusaurus { + background-color: #F6FBFC; +} +[data-theme='dark'] #__docusaurus { + background-color: #161717; +} + .katex-html { display: none; } @@ -91,7 +100,6 @@ html[data-theme='dark'] { justify-content: center; margin: 0 auto; text-align: center; - background: white; border: none; width: 50%; } diff --git a/docs/src/css/sidebar.css b/docs/src/css/sidebar.css new file mode 100644 index 00000000000..3c03c374058 --- /dev/null +++ b/docs/src/css/sidebar.css @@ -0,0 +1,4 @@ +.divider { + border-top: 2px solid #eee; + margin: 0.5em 0; +} diff --git a/docs/src/pages/index.jsx b/docs/src/pages/index.jsx index 8485a730785..d5cbfcba977 100644 --- a/docs/src/pages/index.jsx +++ b/docs/src/pages/index.jsx @@ -38,7 +38,7 @@ export 
default function Landing() {

Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR - compatible proving system. It's design choices are influenced heavily by Rust and focuses on a simple, + compatible proving system. Its design choices are influenced heavily by Rust and focus on a simple, familiar syntax.

diff --git a/docs/static/img/memes/flextape.jpeg b/docs/static/img/memes/flextape.jpeg new file mode 100644 index 00000000000..65a8992e985 Binary files /dev/null and b/docs/static/img/memes/flextape.jpeg differ diff --git a/docs/static/img/memes/matrix_oracle.jpeg b/docs/static/img/memes/matrix_oracle.jpeg new file mode 100644 index 00000000000..478af092aca Binary files /dev/null and b/docs/static/img/memes/matrix_oracle.jpeg differ diff --git a/docs/static/img/memes/titanic.jpeg b/docs/static/img/memes/titanic.jpeg new file mode 100644 index 00000000000..a29f06ffb93 Binary files /dev/null and b/docs/static/img/memes/titanic.jpeg differ diff --git a/docs/static/img/noir_getting_started_1.png b/docs/static/img/noir_getting_started_1.png index 9de33296e91..beaee6fd3c9 100644 Binary files a/docs/static/img/noir_getting_started_1.png and b/docs/static/img/noir_getting_started_1.png differ diff --git a/docs/static/video/codespaces_showcase.mp4 b/docs/static/video/codespaces_showcase.mp4 new file mode 100644 index 00000000000..191e87b18dc Binary files /dev/null and b/docs/static/video/codespaces_showcase.mp4 differ diff --git a/docs/static/video/how-tos/devcontainer.mp4 b/docs/static/video/how-tos/devcontainer.mp4 new file mode 100644 index 00000000000..91e14ab2aff Binary files /dev/null and b/docs/static/video/how-tos/devcontainer.mp4 differ diff --git a/docs/tsconfig.json b/docs/tsconfig.json index 01b56ec5988..241fcf4b5e3 100644 --- a/docs/tsconfig.json +++ b/docs/tsconfig.json @@ -1,6 +1,7 @@ { "extends": "@docusaurus/tsconfig", "compilerOptions": { - "baseUrl": "." 
+ "baseUrl": ".", + "downlevelIteration": true }, } diff --git a/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md index f4ca361d3c4..0de5597c213 100644 --- a/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.17.0/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment diff --git a/docs/versioned_docs/version-v0.17.0/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.17.0/getting_started/01_hello_world.md index 8b4416beba1..d4daae605a2 100644 --- a/docs/versioned_docs/version-v0.17.0/getting_started/01_hello_world.md +++ b/docs/versioned_docs/version-v0.17.0/getting_started/01_hello_world.md @@ -144,4 +144,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](breakdown), we will go into more detail on each step performed. +In the [next section](./02_breakdown.md), we will go into more detail on each step performed. diff --git a/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md index bc0e742fb4e..e7b1f33b339 100644 --- a/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.17.0/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.17.0/index.md b/docs/versioned_docs/version-v0.17.0/index.md index ddbee58f6e4..2d5e6f4454f 100644 --- a/docs/versioned_docs/version-v0.17.0/index.md +++ b/docs/versioned_docs/version-v0.17.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. 
This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.17.0/language_concepts/06_generics.md index 9fb4177c2a8..36c2b593fcd 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/06_generics.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/06_generics.md @@ -110,4 +110,4 @@ fn main() { ``` You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). +[here](https://github.com/noir-lang/noir/blob/master/test_programs/execution_success/generics/src/main.nr). 
diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md b/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.17.0/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.17.0/migration_notes.md b/docs/versioned_docs/version-v0.17.0/migration_notes.md index 69782cba388..1a81af04b3a 100644 --- a/docs/versioned_docs/version-v0.17.0/migration_notes.md +++ b/docs/versioned_docs/version-v0.17.0/migration_notes.md @@ -42,7 +42,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -70,7 +70,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as Nargo currently expects the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.17.0/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.17.0/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/docs/versioned_docs/version-v0.17.0/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.17.0/noir_js/getting_started/01_tiny_noir_app.md index 6955f7a1e64..142cd02b94c 100644 --- a/docs/versioned_docs/version-v0.17.0/noir_js/getting_started/01_tiny_noir_app.md +++ b/docs/versioned_docs/version-v0.17.0/noir_js/getting_started/01_tiny_noir_app.md @@ -251,6 +251,6 @@ By saving, your app will refresh and here's our complete Tiny Noir App! ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md b/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md index 11fef2bf8b5..3480fbfedad 100644 --- a/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md +++ b/docs/versioned_docs/version-v0.17.0/noir_js/reference/01_noirjs.md @@ -90,7 +90,7 @@ This async method generates a witness and a proof given an object as input. 
### Syntax ```js -async generateFinalproof(input) +async generateFinalProof(input) ``` ### Parameters diff --git a/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md b/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md index f444eab1772..958cabd6289 100644 --- a/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md +++ b/docs/versioned_docs/version-v0.17.0/noir_js/reference/02_bb_backend.md @@ -41,7 +41,7 @@ constructor(acirCircuit, (numberOfThreads = 1)); | Parameter | Type | Description | | ----------------- | ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode Tipically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | +| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode typically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | | `numberOfThreads` | Number (optional) | The number of threads to be used by the backend. Defaults to 1. 
| ### Usage diff --git a/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md index c758846b688..a412de19d06 100644 --- a/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.17.0/standard_library/black_box_fns.md @@ -42,4 +42,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.17.0/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.17.0/standard_library/cryptographic_primitives/04_ec_primitives.md index 6e6b19b6861..d3af3cf7c3b 100644 --- a/docs/versioned_docs/version-v0.17.0/standard_library/cryptographic_primitives/04_ec_primitives.md +++ b/docs/versioned_docs/version-v0.17.0/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -71,7 +71,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/docs/versioned_docs/version-v0.17.0/standard_library/logging.md b/docs/versioned_docs/version-v0.17.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.17.0/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.17.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.0/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects use [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/docs/versioned_docs/version-v0.19.0/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.0/getting_started/01_hello_world.md index 8b4416beba1..d4daae605a2 100644 --- a/docs/versioned_docs/version-v0.19.0/getting_started/01_hello_world.md +++ b/docs/versioned_docs/version-v0.19.0/getting_started/01_hello_world.md @@ -144,4 +144,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](breakdown), we will go into more detail on each step performed. +In the [next section](./02_breakdown.md), we will go into more detail on each step performed. 
diff --git a/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.0/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.0/index.md b/docs/versioned_docs/version-v0.19.0/index.md index 4e2f4043892..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.0/index.md +++ b/docs/versioned_docs/version-v0.19.0/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. 
Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.0/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/docs/versioned_docs/version-v0.19.0/migration_notes.md b/docs/versioned_docs/version-v0.19.0/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.0/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.0/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). 
You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). ### Crate Root diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { 
path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.0/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.0/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. 
Test functions can't have any arguments currently. ### Test fail diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.0/noir_js/getting_started/01_tiny_noir_app.md index c51ed61de52..795baa59d59 100644 --- a/docs/versioned_docs/version-v0.19.0/noir_js/getting_started/01_tiny_noir_app.md +++ b/docs/versioned_docs/version-v0.19.0/noir_js/getting_started/01_tiny_noir_app.md @@ -255,6 +255,6 @@ You can find the complete app code for this guide [here](https://github.com/noir ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md index ccdd53f2bcc..2e90779ceab 100644 --- a/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/01_noirjs.md @@ -94,7 +94,7 @@ This async method generates a witness and a proof given an object as input. 
### Syntax ```js -async generateFinalproof(input) +async generateFinalProof(input) ``` ### Parameters diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md index f444eab1772..958cabd6289 100644 --- a/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/02_bb_backend.md @@ -41,7 +41,7 @@ constructor(acirCircuit, (numberOfThreads = 1)); | Parameter | Type | Description | | ----------------- | ----------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode Tipically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | +| `acirCircuit` | Object | A circuit represented in a `json` format, containing the ABI and bytecode typically obtained by running [`nargo compile`](../../nargo/01_commands.md). This is the same circuit expected to be passed to [the Noir class](01_noirjs.md) | | `numberOfThreads` | Number (optional) | The number of threads to be used by the backend. Defaults to 1. | ### Usage diff --git a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.0/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. 
#### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.0/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/04_ec_primitives.md index 6e6b19b6861..d3af3cf7c3b 100644 --- a/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/04_ec_primitives.md +++ b/docs/versioned_docs/version-v0.19.0/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -71,7 +71,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/docs/versioned_docs/version-v0.19.0/standard_library/logging.md b/docs/versioned_docs/version-v0.19.0/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.0/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.0/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.1/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects use [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/docs/versioned_docs/version-v0.19.1/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.1/getting_started/01_hello_world.md index 8b4416beba1..d4daae605a2 100644 --- a/docs/versioned_docs/version-v0.19.1/getting_started/01_hello_world.md +++ b/docs/versioned_docs/version-v0.19.1/getting_started/01_hello_world.md @@ -144,4 +144,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](breakdown), we will go into more detail on each step performed. +In the [next section](./02_breakdown.md), we will go into more detail on each step performed. 
diff --git a/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.1/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.1/index.md b/docs/versioned_docs/version-v0.19.1/index.md index 4e2f4043892..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.1/index.md +++ b/docs/versioned_docs/version-v0.19.1/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. 
Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.1/language_concepts/06_generics.md index 9fb4177c2a8..36c2b593fcd 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/06_generics.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/06_generics.md @@ -110,4 +110,4 @@ fn main() { ``` You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). +[here](https://github.com/noir-lang/noir/blob/master/test_programs/execution_success/generics/src/main.nr). 
diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.1/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.1/migration_notes.md b/docs/versioned_docs/version-v0.19.1/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.1/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.1/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as Nargo currently expects the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.1/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.1/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/docs/versioned_docs/version-v0.19.1/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.1/noir_js/getting_started/01_tiny_noir_app.md index c51ed61de52..795baa59d59 100644 --- a/docs/versioned_docs/version-v0.19.1/noir_js/getting_started/01_tiny_noir_app.md +++ b/docs/versioned_docs/version-v0.19.1/noir_js/getting_started/01_tiny_noir_app.md @@ -255,6 +255,6 @@ You can find the complete app code for this guide [here](https://github.com/noir ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.1/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. 
#### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.1/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.1/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.1/standard_library/cryptographic_primitives/04_ec_primitives.md index 6e6b19b6861..d3af3cf7c3b 100644 --- a/docs/versioned_docs/version-v0.19.1/standard_library/cryptographic_primitives/04_ec_primitives.md +++ b/docs/versioned_docs/version-v0.19.1/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -71,7 +71,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/docs/versioned_docs/version-v0.19.1/standard_library/logging.md b/docs/versioned_docs/version-v0.19.1/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.1/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.1/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.2/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects use [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/docs/versioned_docs/version-v0.19.2/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.2/getting_started/01_hello_world.md index 8b4416beba1..d4daae605a2 100644 --- a/docs/versioned_docs/version-v0.19.2/getting_started/01_hello_world.md +++ b/docs/versioned_docs/version-v0.19.2/getting_started/01_hello_world.md @@ -144,4 +144,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](breakdown), we will go into more detail on each step performed. +In the [next section](./02_breakdown.md), we will go into more detail on each step performed. 
diff --git a/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.2/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.2/index.md b/docs/versioned_docs/version-v0.19.2/index.md index 4e2f4043892..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.2/index.md +++ b/docs/versioned_docs/version-v0.19.2/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. 
Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.2/language_concepts/06_generics.md index 9fb4177c2a8..36c2b593fcd 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/06_generics.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/06_generics.md @@ -110,4 +110,4 @@ fn main() { ``` You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). +[here](https://github.com/noir-lang/noir/blob/master/test_programs/execution_success/generics/src/main.nr). 
diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.2/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.2/migration_notes.md b/docs/versioned_docs/version-v0.19.2/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.2/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.2/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.2/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.2/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/docs/versioned_docs/version-v0.19.2/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.2/noir_js/getting_started/01_tiny_noir_app.md index c51ed61de52..795baa59d59 100644 --- a/docs/versioned_docs/version-v0.19.2/noir_js/getting_started/01_tiny_noir_app.md +++ b/docs/versioned_docs/version-v0.19.2/noir_js/getting_started/01_tiny_noir_app.md @@ -255,6 +255,6 @@ You can find the complete app code for this guide [here](https://github.com/noir ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.2/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. 
#### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.2/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.2/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.2/standard_library/cryptographic_primitives/04_ec_primitives.md index 6e6b19b6861..d3af3cf7c3b 100644 --- a/docs/versioned_docs/version-v0.19.2/standard_library/cryptographic_primitives/04_ec_primitives.md +++ b/docs/versioned_docs/version-v0.19.2/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -71,7 +71,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/docs/versioned_docs/version-v0.19.2/standard_library/logging.md b/docs/versioned_docs/version-v0.19.2/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.2/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.2/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.3/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/docs/versioned_docs/version-v0.19.3/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.3/getting_started/01_hello_world.md index 8b4416beba1..d4daae605a2 100644 --- a/docs/versioned_docs/version-v0.19.3/getting_started/01_hello_world.md +++ b/docs/versioned_docs/version-v0.19.3/getting_started/01_hello_world.md @@ -144,4 +144,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](breakdown), we will go into more detail on each step performed. +In the [next section](./02_breakdown.md), we will go into more detail on each step performed. 
diff --git a/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md index 9a17f5d6360..7a7fb876c35 100644 --- a/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.3/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.3/index.md b/docs/versioned_docs/version-v0.19.3/index.md index 4e2f4043892..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.3/index.md +++ b/docs/versioned_docs/version-v0.19.3/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. 
Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.3/language_concepts/06_generics.md index 9fb4177c2a8..36c2b593fcd 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/06_generics.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/06_generics.md @@ -110,4 +110,4 @@ fn main() { ``` You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). +[here](https://github.com/noir-lang/noir/blob/master/test_programs/execution_success/generics/src/main.nr). 
diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.3/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.19.3/migration_notes.md b/docs/versioned_docs/version-v0.19.3/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.3/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.3/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. 
-The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. Then run: diff --git a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.3/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.3/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md index c51ed61de52..795baa59d59 100644 --- a/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md +++ b/docs/versioned_docs/version-v0.19.3/noir_js/getting_started/01_tiny_noir_app.md @@ -255,6 +255,6 @@ You can find the complete app code for this guide [here](https://github.com/noir ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md index a8a0bb451c1..1d7b54a9dca 100644 --- a/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.3/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. 
#### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.3/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/04_ec_primitives.md index 6e6b19b6861..d3af3cf7c3b 100644 --- a/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/04_ec_primitives.md +++ b/docs/versioned_docs/version-v0.19.3/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -71,7 +71,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/docs/versioned_docs/version-v0.19.3/standard_library/logging.md b/docs/versioned_docs/version-v0.19.3/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.3/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.3/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md b/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md index 349756d60c0..7d3c88c7693 100644 --- a/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md +++ b/docs/versioned_docs/version-v0.19.4/getting_started/00_nargo_installation.md @@ -140,9 +140,9 @@ Commands: ### Option 3: Compile from Source -Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating ssues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). +Due to the large number of native dependencies, Noir projects uses [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigating issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell"). -Combined with direnv, which automatically sets or unsets environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. 
#### Setting up your environment @@ -220,7 +220,7 @@ Advanced: If you aren't using direnv nor launching your editor within the subshe ### Option 4: WSL (for Windows) -The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed nativerly. However, it is available by using Windows Subsystem for Linux (WSL). +The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL). Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL. diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md b/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md index 8b4416beba1..d4daae605a2 100644 --- a/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md +++ b/docs/versioned_docs/version-v0.19.4/getting_started/01_hello_world.md @@ -144,4 +144,4 @@ corresponding error instead. Congratulations, you have now created and verified a proof for your very first Noir program! -In the [next section](breakdown), we will go into more detail on each step performed. +In the [next section](./02_breakdown.md), we will go into more detail on each step performed. 
diff --git a/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md b/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md index 9a17f5d6360..d28a54a1600 100644 --- a/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md +++ b/docs/versioned_docs/version-v0.19.4/getting_started/02_breakdown.md @@ -39,7 +39,7 @@ Example Nargo.toml: ```toml [package] -name = "noirstarter" +name = "noir_starter" type = "bin" authors = ["Alice"] compiler_version = "0.9.0" diff --git a/docs/versioned_docs/version-v0.19.4/index.md b/docs/versioned_docs/version-v0.19.4/index.md index 4e2f4043892..380368db036 100644 --- a/docs/versioned_docs/version-v0.19.4/index.md +++ b/docs/versioned_docs/version-v0.19.4/index.md @@ -25,7 +25,7 @@ slug: / Noir is a Domain Specific Language for SNARK proving systems. It has been designed to use any ACIR compatible proving system. -It's design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. +Its design choices are influenced heavily by Rust and focuses on a simple, familiar syntax. ## Who is Noir for? @@ -34,7 +34,7 @@ Noir can be used for a variety of purposes. ### Solidity Developers Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will -be modularised in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create +be modularized in the future; however, as of the alpha, you can use the [`nargo codegen-verifier`](./nargo/commands#nargo-codegen-verifier) command to create a verifier contract. ### Protocol Developers @@ -55,7 +55,7 @@ must be implemented for it. Noir is simple and flexible in its design, as it does not compile immediately to a fixed NP-complete language. 
Instead, Noir compiles to an intermediate language (ACIR), which itself can be compiled -to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkwork's Marlin backend, or others). +to an arithmetic circuit (if choosing to target Aztec's barretenberg backend) or a rank-1 constraint system (if choosing to target an R1CS backend like Arkworks' Marlin backend, or others). This in itself brings up a few challenges within the design process, but allows one to decouple the programming language completely from the backend. This is similar in theory to LLVM. diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md b/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md index d5caa463765..0e35ef5e584 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/03_ops.md @@ -62,7 +62,7 @@ fn main(x : Field) { Noir has no support for the logical operators `||` and `&&`. This is because encoding the short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can -use the bitwise operators `|` and `&` which operate indentically for booleans, just without the +use the bitwise operators `|` and `&` which operate identically for booleans, just without the short-circuiting. 
```rust diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md b/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md index 6b621eda3eb..097d6ee9894 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/05_unconstrained.md @@ -7,7 +7,7 @@ keywords: [Noir programming language, unconstrained, open] -Unconstrained functions are functions which do not constrain any of the included computation and allow for non-determinisitic computation. +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. ## Why? diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md b/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md index 9fb4177c2a8..36c2b593fcd 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/06_generics.md @@ -110,4 +110,4 @@ fn main() { ``` You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). +[here](https://github.com/noir-lang/noir/blob/master/test_programs/execution_success/generics/src/main.nr). 
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md index ad902c42c9b..ed3fed820ec 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/07_mutability.md @@ -69,11 +69,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md index d546cc463a8..74f573f7d3f 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types.md @@ -93,4 +93,4 @@ fn main() { ### BigInt -You can acheive BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. 
diff --git a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md index b1e7ad11bfd..1814365800a 100644 --- a/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md +++ b/docs/versioned_docs/version-v0.19.4/language_concepts/data_types/01_integers.md @@ -107,6 +107,6 @@ Example of how it is used: use dep::std; fn main(x: u8, y: u8) -> pub u8 { - std::wrapping_add(x + y) + std::wrapping_add(x, y) } ``` diff --git a/docs/versioned_docs/version-v0.19.4/migration_notes.md b/docs/versioned_docs/version-v0.19.4/migration_notes.md index 0d7e0af0efd..905bca3d30c 100644 --- a/docs/versioned_docs/version-v0.19.4/migration_notes.md +++ b/docs/versioned_docs/version-v0.19.4/migration_notes.md @@ -50,7 +50,7 @@ If you are using the default barretenberg backend, simply run: nargo prove ``` -with you Noir program. +with your Noir program. This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. @@ -78,7 +78,7 @@ nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/bar This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. -The gzipped filed is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. 
Then run: diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md index fb83a33d94e..744de72bb2c 100644 --- a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/crates_and_packages.md @@ -23,7 +23,7 @@ _Library crates_ don't have a `main` function and they don't compile down to ACI #### Contracts -Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/src/contracts). +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). 
### Crate Root diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md index 75f95aaa305..e91e73a4c4f 100644 --- a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/dependencies.md @@ -34,7 +34,7 @@ If the module is in a subdirectory, you can define a subdirectory in your git re # Nargo.toml [dependencies] -easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/src/contracts/easy_private_token_contract"} +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} ``` ## Specifying a local dependency @@ -48,7 +48,7 @@ For example, this file structure has a library and binary crate │   ├── Nargo.toml │   └── src │   └── main.nr -└── liba +└── lib_a ├── Nargo.toml └── src └── lib.nr @@ -60,17 +60,17 @@ Inside of the binary crate, you can specify: # Nargo.toml [dependencies] -libA = { path = "../liba" } +lib_a = { path = "../lib_a" } ``` ## Importing dependencies You can import a dependency to a Noir file using the following syntax. 
For example, to import the -ecrecover-noir library and local liba referenced above: +ecrecover-noir library and local lib_a referenced above: ```rust use dep::ecrecover; -use dep::libA; +use dep::lib_a; ``` You can also import only the specific parts of dependency that you want to use, like so: diff --git a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md index 147c9b284e8..11e60cbf35e 100644 --- a/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md +++ b/docs/versioned_docs/version-v0.19.4/modules_packages_crates/modules.md @@ -10,7 +10,7 @@ Noir's module system follows the same convention as the _newer_ version of Rust' ## Purpose of Modules -Modules are used to organise files. Without modules all of your code would need to live in a single +Modules are used to organize files. Without modules all of your code would need to live in a single file. In Noir, the compiler does not automatically scan all of your files to detect modules. This must be done explicitly by the developer. diff --git a/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md index da767274efd..5c57ef92705 100644 --- a/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md +++ b/docs/versioned_docs/version-v0.19.4/nargo/02_testing.md @@ -23,8 +23,8 @@ fn test_add() { } ``` -Running `nargo test` will test that the `test_add` function can be executed while satisfying the all -the contraints which allows you to test that add returns the expected values. Test functions can't +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't have any arguments currently. 
### Test fail diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md b/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md index c51ed61de52..795baa59d59 100644 --- a/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md +++ b/docs/versioned_docs/version-v0.19.4/noir_js/getting_started/01_tiny_noir_app.md @@ -255,6 +255,6 @@ You can find the complete app code for this guide [here](https://github.com/noir ## Further Reading -You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/next-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. diff --git a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md index e54116fb1d8..c54468891af 100644 --- a/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md +++ b/docs/versioned_docs/version-v0.19.4/noir_js/reference/noir_js/classes/Noir.md @@ -95,7 +95,7 @@ Generates a witness and a proof given an object as input. 
#### Example ```typescript -async generateFinalproof(input) +async generateFinalProof(input) ``` *** diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.19.4/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md index 6e6b19b6861..d3af3cf7c3b 100644 --- a/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md +++ b/docs/versioned_docs/version-v0.19.4/standard_library/cryptographic_primitives/04_ec_primitives.md @@ -71,7 +71,7 @@ does indeed lie on `c` by calling `c.contains(p1)`. 
## Examples The -[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/crates/nargo_cli/tests/test_data/ec_baby_jubjub/src/main.nr) +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) illustrates all of the above primitives on various forms of the Baby Jubjub curve. A couple of more interesting examples in Noir would be: diff --git a/docs/versioned_docs/version-v0.19.4/standard_library/logging.md b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md index 7e2fd9b9aff..4ba0fe0e656 100644 --- a/docs/versioned_docs/version-v0.19.4/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.19.4/standard_library/logging.md @@ -22,7 +22,7 @@ implementation of rust's `println!` macro, this construct can be useful for debu You can print the output of println statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are println statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). `println` will not work for failed constraints caught at compile time. The `println` statement is unconstrained, so it works for outputting integers, fields, strings, and even structs or expressions. 
For example: diff --git a/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md b/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md new file mode 100644 index 00000000000..9357d3c7341 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/explainers/explainer-recursion.md @@ -0,0 +1,175 @@ +--- +title: Recursive proofs +description: Explore the concept of recursive proofs in Zero-Knowledge programming. Understand how recursion works in Noir, a language for writing smart contracts on the EVM blockchain. Learn through practical examples like Alice and Bob's guessing game, Charlie's recursive merkle tree, and Daniel's reusable components. Discover how to use recursive proofs to optimize computational resources and improve efficiency. + +keywords: + [ + "Recursive Proofs", + "Zero-Knowledge Programming", + "Noir", + "EVM Blockchain", + "Smart Contracts", + "Recursion in Noir", + "Alice and Bob Guessing Game", + "Recursive Merkle Tree", + "Reusable Components", + "Optimizing Computational Resources", + "Improving Efficiency", + "Verification Key", + "Aggregation Objects", + "Recursive zkSNARK schemes", + "PLONK", + "Proving and Verification Keys" + ] +sidebar_position: 1 +--- + +In programming, we tend to think of recursion as something calling itself. A classic example would be the calculation of the factorial of a number: + +```js +function factorial(n) { + if (n === 0 || n === 1) { + return 1; + } else { + return n * factorial(n - 1); + } +} +``` + +In this case, while `n` is not `1`, this function will keep calling itself until it hits the base case, bubbling up the result on the call stack: + +```md + Is `n` 1? <--------- + /\ / + / \ n = n -1 + / \ / + Yes No -------- +``` + +In Zero-Knowledge, recursion has some similarities. + +It is not a Noir function calling itself, but a proof being used as an input to another circuit. In short, you verify one proof *inside* another proof, returning the proof that both proofs are valid. 
+ +This means that, given enough computational resources, you can prove the correctness of any arbitrary number of proofs in a single proof. This could be useful to design state channels (for which a common example would be [Bitcoin's Lightning Network](https://en.wikipedia.org/wiki/Lightning_Network)), to save on gas costs by settling one proof on-chain, or simply to make business logic less dependent on a consensus mechanism. + +## Examples + +Let us look at some of these examples + +### Alice and Bob - Guessing game + +Alice and Bob are friends, and they like guessing games. They want to play a guessing game online, but for that, they need a trusted third-party that knows both of their secrets and finishes the game once someone wins. + +So, they use zero-knowledge proofs. Alice tries to guess Bob's number, and Bob will generate a ZK proof stating whether she succeeded or failed. + +This ZK proof can go on a smart contract, revealing the winner and even giving prizes. However, this means every turn needs to be verified on-chain. This incurs some cost and waiting time that may simply make the game too expensive or time-consuming to be worth it. + +So, Alice started thinking: "what if Bob generates his proof, and instead of sending it on-chain, I verify it *within* my own proof before playing my own turn?". She can then generate a proof that she verified his proof, and so on. + +```md + Did you fail? <-------------------------- + / \ / + / \ n = n -1 + / \ / + Yes No / + | | / + | | / + | You win / + | / + | / +Generate proof of that / + + / + my own guess ---------------- +``` + +### Charlie - Recursive merkle tree + +Charlie is a concerned citizen, and wants to be sure his vote in an election is accounted for. He votes with a ZK proof, but he has no way of knowing that his ZK proof was included in the total vote count! 
+ +So, the tallier puts all the votes in a merkle tree, and everyone can also prove the verification of two proofs within one proof, as such: + +```md + abcd + __________|______________ + | | + ab cd + _____|_____ ______|______ + | | | | + alice bob charlie daniel +``` + +Doing this recursively allows us to arrive on a final proof `abcd` which if true, verifies the correctness of all the votes. + +### Daniel - Reusable components + +Daniel has a big circuit and a big headache. A part of his circuit is a setup phase that finishes with some assertions that need to be made. But that section alone takes most of the proving time, and is largely independent of the rest of the circuit. + +He could find it more efficient to generate a proof for that setup phase separately, and verifying it in his actual business logic section of the circuit. This will allow for parallelization of both proofs, which results in a considerable speedup. + +## What params do I need + +As you can see in the [recursion reference](noir/standard_library/recursion.md), a simple recursive proof requires: + +- The proof to verify +- The Verification Key of the circuit that generated the proof +- A hash of this verification key, as it's needed for some backends +- The public inputs for the proof +- The input aggregation object + +It also returns the `output aggregation object`. These aggregation objects can be confusing at times, so let's dive in a little bit. + +### Aggregation objects + +Recursive zkSNARK schemes do not necessarily "verify a proof" in the sense that you expect a true or false to be spit out by the verifier. Rather an aggregation object is built over the public inputs. + +In the case of PLONK the recursive aggregation object is two G1 points (expressed as 16 witness values). The final verifier (in our case this is most often the smart contract verifier) has to be aware of this aggregation object to execute a pairing and check the validity of these points. 
+
+So, taking the example of Alice and Bob and their guessing game:
+
+- Alice makes her guess. Her proof is *not* recursive: it doesn't verify any proof within it! It's just a standard `assert(x != y)` circuit
+- Bob verifies Alice's proof and makes his own guess. In this circuit, he is verifying a proof, so it needs to output an `aggregation object`: he is generating a recursive proof!
+- Alice verifies Bob's *recursive proof*, and uses Bob's `output aggregation object` as the `input aggregation object` in her proof... Which in turn, generates another `output aggregation object`.
+
+One should notice that when Bob generates his first proof, he has no input aggregation object. Because he is not verifying a recursive proof, he has no `input aggregation object`. In this case, he may use zeros instead.
+
+We can imagine the `aggregation object` as the baton in a [relay race](https://en.wikipedia.org/wiki/Relay_race). The first runner doesn't have to receive the baton from anyone else, as he/she already starts with it. But when his/her turn is over, the next runner needs to receive it, run a bit more, and pass it along. Even though every runner could theoretically verify the baton mid-run (why not? 🏃🔍), only at the end of the race does the referee verify that the whole race is valid.
+
+## Some architecture
+
+As with everything in computer science, there's no one-size-fits-all. But there are some patterns that could help understanding and implementing them. To give three examples:
+
+### Adding some logic to a proof verification
+
+This would be an approach for something like our guessing game, where proofs are sent back and forth and are verified by each opponent. 
This circuit would be divided into two sections:
+
+- A `recursive verification` section, which would be just the call to `std::verify_proof`, and that would be skipped on the first move (since there's no proof to verify)
+- A `guessing` section, which is basically the logic part where the actual guessing happens
+
+In such a situation, and assuming Alice is first, she would skip the first part and try to guess Bob's number. Bob would then verify her proof on the first section of his run, and try to guess Alice's number on the second part, and so on.
+
+### Aggregating proofs
+
+In some one-way interaction situations, recursion would allow for aggregation of simple proofs that don't need to be immediately verified on-chain or elsewhere.
+
+To give a practical example, a barman wouldn't need to verify a "proof-of-age" on-chain every time he serves alcohol to a customer. Instead, the architecture would comprise two circuits:
+
+- A `main`, non-recursive circuit with some logic
+- A `recursive` circuit meant to verify two proofs in one proof
+
+The customer's proofs would be intermediate, and made on their phones, and the barman could just verify them locally. He would then aggregate them into a final proof sent on-chain (or elsewhere) at the end of the day.
+
+### Recursively verifying different circuits
+
+Nothing prevents you from verifying different circuits in a recursive proof, for example:
+
+- A `circuit1` circuit
+- A `circuit2` circuit
+- A `recursive` circuit
+
+In this example, a regulator could verify that taxes were paid for a specific purchase by aggregating both a `payer` circuit (proving that a purchase was made and taxes were paid), and a `receipt` circuit (proving that the payment was received)
+
+## How fast is it
+
+At the time of writing, verifying recursive proofs is surprisingly fast. This is because most of the time is spent on generating the verification key that will be used to generate the next proof. 
So you are able to cache the verification key and reuse it later. + +Currently, Noir JS packages don't expose the functionality of loading proving and verification keys, but that feature exists in the underlying `bb.js` package. diff --git a/docs/docs/explanations/noir/traits.md b/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md similarity index 98% rename from docs/docs/explanations/noir/traits.md rename to docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md index d24deaa84da..7ba07e74f40 100644 --- a/docs/docs/explanations/noir/traits.md +++ b/docs/versioned_docs/version-v0.22.0/explanations/noir/traits.md @@ -53,7 +53,7 @@ trait Area { fn area(self) -> Field; } -fn log_area(shape: S) where S: Area { +fn log_area(shape: T) where T: Area { println(shape.area()); } ``` @@ -85,7 +85,7 @@ As seen in `log_area` above, when we want to create a function or method that is a trait, we can add a where clause to the generic function. ```rust -fn log_area(shape: S) where S: Area { +fn log_area(shape: T) where T: Area { println(shape.area()); } ``` @@ -94,7 +94,7 @@ It is also possible to apply multiple trait constraints on the same variable at operator. 
Similarly, we can have multiple trait constraints by separating each with a comma: ```rust -fn foo(elements: [T], thing: U) where +fn foo(elements: [T], thing: U) where T: Default + Add + Eq, U: Bar, { diff --git a/docs/docs/explanations/standard_library/traits.md b/docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md similarity index 100% rename from docs/docs/explanations/standard_library/traits.md rename to docs/versioned_docs/version-v0.22.0/explanations/standard_library/traits.md diff --git a/docs/docs/explanations/noir/_category_.json b/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json similarity index 76% rename from docs/docs/explanations/noir/_category_.json rename to docs/versioned_docs/version-v0.22.0/getting_started/_category_.json index 448d8987d1a..5d694210bbf 100644 --- a/docs/docs/explanations/noir/_category_.json +++ b/docs/versioned_docs/version-v0.22.0/getting_started/_category_.json @@ -1,5 +1,4 @@ { - "label": "Noir", "position": 0, "collapsible": true, "collapsed": true diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md b/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md new file mode 100644 index 00000000000..f10916c39c5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/create_a_project.md @@ -0,0 +1,142 @@ +--- +title: Creating A Project +description: + Learn how to create and verify your first Noir program using Nargo, a programming language for + zero-knowledge proofs. +keywords: + [ + Nargo, + Noir, + zero-knowledge proofs, + programming language, + create Noir program, + verify Noir program, + step-by-step guide, + ] +sidebar_position: 1 + +--- + +Now that we have installed Nargo, it is time to make our first hello world program! + +## Create a Project Directory + +Noir code can live anywhere on your computer. Let us create a _projects_ folder in the home +directory to house our Noir programs. 
+ +For Linux, macOS, and Windows PowerShell, create the directory and change directory into it by +running: + +```sh +mkdir ~/projects +cd ~/projects +``` + +## Create Our First Nargo Project + +Now that we are in the projects directory, create a new Nargo project by running: + +```sh +nargo new hello_world +``` + +> **Note:** `hello_world` can be any arbitrary project name, we are simply using `hello_world` for +> demonstration. +> +> In production, the common practice is to name the project folder as `circuits` for better +> identifiability when sitting alongside other folders in the codebase (e.g. `contracts`, `scripts`, +> `test`). + +A `hello_world` folder would be created. Similar to Rust, the folder houses _src/main.nr_ and +_Nargo.toml_ that contains the source code and environmental options of your Noir program +respectively. + +### Intro to Noir Syntax + +Let us take a closer look at _main.nr_. The default _main.nr_ generated should look like this: + +```rust +fn main(x : Field, y : pub Field) { + assert(x != y); +} +``` + +The first line of the program specifies the program's inputs: + +```rust +x : Field, y : pub Field +``` + +Program inputs in Noir are private by default (e.g. `x`), but can be labeled public using the +keyword `pub` (e.g. `y`). To learn more about private and public values, check the +[Data Types](../noir/syntax/data_types/index.md) section. + +The next line of the program specifies its body: + +```rust +assert(x != y); +``` + +The Noir syntax `assert` can be interpreted as something similar to constraints in other zk-contract languages. + +For more Noir syntax, check the [Language Concepts](../noir/syntax/comments.md) chapter. 
+ +## Build In/Output Files + +Change directory into _hello_world_ and build in/output files for your Noir program by running: + +```sh +cd hello_world +nargo check +``` + +Two additional files would be generated in your project directory: + +_Prover.toml_ houses input values, and _Verifier.toml_ houses public values. + +## Prove Our Noir Program + +Now that the project is set up, we can create a proof of correct execution on our Noir program. + +Fill in input values for execution in the _Prover.toml_ file. For example: + +```toml +x = "1" +y = "2" +``` + +Prove the valid execution of your Noir program: + +```sh +nargo prove +``` + +A new folder _proofs_ would then be generated in your project directory, containing the proof file +`.proof`, where the project name is defined in Nargo.toml. + +The _Verifier.toml_ file would also be updated with the public values computed from program +execution (in this case the value of `y`): + +```toml +y = "0x0000000000000000000000000000000000000000000000000000000000000002" +``` + +> **Note:** Values in _Verifier.toml_ are computed as 32-byte hex values. + +## Verify Our Noir Program + +Once a proof is generated, we can verify correct execution of our Noir program by verifying the +proof file. + +Verify your proof by running: + +```sh +nargo verify +``` + +The verification will complete in silence if it is successful. If it fails, it will log the +corresponding error instead. + +Congratulations, you have now created and verified a proof for your very first Noir program! + +In the [next section](./project_breakdown.md), we will go into more detail on each step performed. 
diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json b/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json new file mode 100644 index 00000000000..0c02fb5d4d7 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/installation/_category_.json @@ -0,0 +1,6 @@ +{ + "position": 0, + "label": "Install Nargo", + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md b/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md new file mode 100644 index 00000000000..ddb8a250eb4 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/installation/index.md @@ -0,0 +1,45 @@ +--- +title: Nargo Installation +description: + nargo is a command line tool for interacting with Noir programs. This page is a quick guide on how to install Nargo though the most common and easy method, noirup +keywords: [ + Nargo + Noir + Rust + Cargo + Noirup + Installation + Terminal Commands + Version Check + Nightlies + Specific Versions + Branches + Noirup Repository +] +--- + +`nargo` is the one-stop-shop for almost everything related with Noir. The name comes from our love for Rust and its package manager `cargo`. + +With `nargo`, you can start new projects, compile, execute, prove, verify, test, generate solidity contracts, and do pretty much all that is available in Noir. + +Similarly to `rustup`, we also maintain an easy installation method that covers most machines: `noirup`. + +## Installing Noirup + +Open a terminal on your machine, and write: + +```bash +curl -L https://raw.githubusercontent.com/noir-lang/noirup/main/install | bash +``` + +Close the terminal, open another one, and run + +```bash +noirup +``` + +Done. That's it. You should have the latest version working. You can check with `nargo --version`. 
+ +You can also install nightlies, specific versions +or branches, check out the [noirup repository](https://github.com/noir-lang/noirup) for more +information. diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md b/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md new file mode 100644 index 00000000000..a532f83750e --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/installation/other_install_methods.md @@ -0,0 +1,190 @@ +--- +title: Alternative Install Methods +description: + There are different ways to install Nargo, the one-stop shop and command-line tool for developing Noir programs. This guide explains other methods that don't rely on noirup, such as compiling from source, installing from binaries, and using WSL for windows +keywords: [ + Installation + Nargo + Noirup + Binaries + Compiling from Source + WSL for Windows + macOS + Linux + Nix + Direnv + Shell & editor experience + Building and testing + Uninstalling Nargo + Noir vs code extension +] +sidebar_position: 1 +--- + + +## Installation + +The most common method of installing Nargo is through [Noirup](./index.md) + +However, there are other methods for installing Nargo: + +- [Binaries](#binaries) +- [Compiling from Source](#compile-from-source) +- [WSL for Windows](#wsl-for-windows) + +### Binaries + +See [GitHub Releases](https://github.com/noir-lang/noir/releases) for the latest and previous +platform specific binaries. + +#### Step 1 + +Paste and run the following in the terminal to extract and install the binary: + +> **macOS / Linux:** If you are prompted with `Permission denied` when running commands, prepend +> `sudo` and re-run it. 
+

##### macOS (Apple Silicon)

```bash
mkdir -p $HOME/.nargo/bin && \
curl -o $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-aarch64-apple-darwin.tar.gz && \
tar -xvf $HOME/.nargo/bin/nargo-aarch64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \
echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \
source ~/.zshrc
```

##### macOS (Intel)

```bash
mkdir -p $HOME/.nargo/bin && \
curl -o $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-apple-darwin.tar.gz && \
tar -xvf $HOME/.nargo/bin/nargo-x86_64-apple-darwin.tar.gz -C $HOME/.nargo/bin/ && \
echo '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.zshrc && \
source ~/.zshrc
```

##### Linux (Bash)

```bash
mkdir -p $HOME/.nargo/bin && \
curl -o $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -L https://github.com/noir-lang/noir/releases/download/v0.6.0/nargo-x86_64-unknown-linux-gnu.tar.gz && \
tar -xvf $HOME/.nargo/bin/nargo-x86_64-unknown-linux-gnu.tar.gz -C $HOME/.nargo/bin/ && \
echo -e '\nexport PATH=$PATH:$HOME/.nargo/bin' >> ~/.bashrc && \
source ~/.bashrc
```

#### Step 2

Check if the installation was successful by running `nargo --version`. You should get a version number.

> **macOS:** If you are prompted with an OS alert, right-click and open the _nargo_ executable from
> Finder. Close the new terminal popped up and `nargo` should now be accessible.

### Compile from Source

Due to the large number of native dependencies, Noir projects use [Nix](https://nixos.org/) and [direnv](https://direnv.net/) to streamline the development experience. It helps mitigate issues commonly associated with dependency management, such as conflicts between required package versions for different projects (often referred to as "dependency hell").
+ +Combined with direnv, which automatically sets or clears environment variables based on the directory, it further simplifies the development process by seamlessly integrating with the developer's shell, facilitating an efficient and reliable workflow for managing and deploying Noir projects with multiple dependencies. + +#### Setting up your environment + +For the best experience, please follow these instructions to setup your environment: + +1. Install Nix following [their guide](https://nixos.org/download.html) for your operating system. +2. Create the file `~/.config/nix/nix.conf` with the contents: + +```ini +experimental-features = nix-command +extra-experimental-features = flakes +``` + +3. Install direnv into your Nix profile by running: + +```sh +nix profile install nixpkgs#direnv +``` + +4. Add direnv to your shell following [their guide](https://direnv.net/docs/hook.html). + 1. For bash or zshell, add `eval "$(direnv hook bash)"` or `eval "$(direnv hook zsh)"` to your ~/.bashrc or ~/.zshrc file, respectively. +5. Restart your shell. + +#### Shell & editor experience + +Now that your environment is set up, you can get to work on the project. + +1. Clone the repository, such as: + +```sh +git clone git@github.com:noir-lang/noir +``` + +> Replacing `noir` with whichever repository you want to work on. + +2. Navigate to the directory: + +```sh +cd noir +``` + +> Replacing `noir` with whichever repository you cloned. + +3. You should see a **direnv error** because projects aren't allowed by default. Make sure you've reviewed and trust our `.envrc` file, then you need to run: + +```sh +direnv allow +``` + +4. Now, wait awhile for all the native dependencies to be built. This will take some time and direnv will warn you that it is taking a long time, but we just need to let it run. + +5. 
Once you are presented with your prompt again, you can start your editor within the project directory (we recommend [VSCode](https://code.visualstudio.com/)):

```sh
code .
```

6. (Recommended) When launching VSCode for the first time, you should be prompted to install our recommended plugins. We highly recommend installing these for the best development experience.

#### Building and testing

Assuming you are using `direnv` to populate your environment, building and testing the project can be done
with the typical `cargo build`, `cargo test`, and `cargo clippy` commands. You'll notice that the `cargo` version matches the version we specify in `rust-toolchain.toml`, which is 1.71.1 at the time of this writing.

If you want to build the entire project in an isolated sandbox, you can use Nix commands:

1. `nix build .` (or `nix build . -L` for verbose output) to build the project in a Nix sandbox.
2. `nix flake check` (or `nix flake check -L` for verbose output) to run clippy and tests in a Nix sandbox.

#### Without `direnv`

If you have hesitations with using direnv, you can launch a subshell with `nix develop` and then launch your editor from within the subshell. However, if VSCode was already launched in the project directory, the environment won't be updated.

Advanced: If you are neither using direnv nor launching your editor within the subshell, you can try to install Barretenberg and other global dependencies the package needs. This is an advanced workflow and likely won't receive support!

### WSL (for Windows)

The default backend for Noir (Barretenberg) doesn't provide Windows binaries at this time. For that reason, Noir cannot be installed natively. However, it is available by using Windows Subsystem for Linux (WSL).

Step 1: Follow the instructions [here](https://learn.microsoft.com/en-us/windows/wsl/install) to install and run WSL.

Step 2: Follow the [Noirup instructions](./index.md).
+ +## Uninstalling Nargo + +### Noirup + +If you installed Noir with `noirup`, you can uninstall Noir by removing the files in `~/.nargo`, `~/nargo` and `~/noir_cache`. + +```bash +rm -r ~/.nargo +rm -r ~/nargo +rm -r ~/noir_cache +``` + +### Nix + +If you installed Noir with Nix or from source, you can remove the binary located at `~/.nix-profile/bin/nargo`. + +```bash +rm ~/.nix-profile/bin/nargo +``` diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md b/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md new file mode 100644 index 00000000000..c4e2a9ae003 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/project_breakdown.md @@ -0,0 +1,199 @@ +--- +title: Project Breakdown +description: + Learn about the anatomy of a Nargo project, including the purpose of the Prover and Verifier TOML + files, and how to prove and verify your program. +keywords: + [Nargo, Nargo project, Prover.toml, Verifier.toml, proof verification, private asset transfer] +sidebar_position: 2 +--- + +This section breaks down our hello world program in section _1.2_. We elaborate on the project +structure and what the `prove` and `verify` commands did in the previous section. + +## Anatomy of a Nargo Project + +Upon creating a new project with `nargo new` and building the in/output files with `nargo check` +commands, you would get a minimal Nargo project of the following structure: + + - src + - Prover.toml + - Verifier.toml + - Nargo.toml + +The source directory _src_ holds the source code for your Noir program. By default only a _main.nr_ +file will be generated within it. + +### Prover.toml + +_Prover.toml_ is used for specifying the input values for executing and proving the program. You can specify `toml` files with different names by using the `--prover-name` or `-p` flags, see the [Prover](#provertoml) section below. Optionally you may specify expected output values for prove-time checking as well. 
+

### Verifier.toml

_Verifier.toml_ contains public in/output values computed when executing the Noir program.

### Nargo.toml

_Nargo.toml_ contains the environmental options of your project. It contains a "package" section and a "dependencies" section.

Example Nargo.toml:

```toml
[package]
name = "noir_starter"
type = "bin"
authors = ["Alice"]
compiler_version = "0.9.0"
description = "Getting started with Noir"
entry = "circuit/main.nr"
license = "MIT"

[dependencies]
ecrecover = {tag = "v0.9.0", git = "https://github.com/colinnielsen/ecrecover-noir.git"}
```

Nargo.toml for a [workspace](../noir/modules_packages_crates/workspaces.md) will look a bit different. For example:

```toml
[workspace]
members = ["crates/a", "crates/b"]
default-member = "crates/a"
```

#### Package section

The package section requires a number of fields including:

- `name` (**required**) - the name of the package
- `type` (**required**) - can be "bin", "lib", or "contract" to specify whether it's a binary, library or Aztec contract
- `authors` (optional) - authors of the project
- `compiler_version` - specifies the version of the compiler to use. This is enforced by the compiler and follows [Rust's versioning](https://doc.rust-lang.org/cargo/reference/manifest.html#the-version-field), so a `compiler_version = 0.18.0` will enforce Nargo version 0.18.0, `compiler_version = ^0.18.0` will enforce anything above 0.18.0 but below 0.19.0, etc. For more information, see how [Rust handles these operators](https://docs.rs/semver/latest/semver/enum.Op.html)
- `description` (optional)
- `entry` (optional) - a relative filepath to use as the entry point into your package (overrides the default of `src/lib.nr` or `src/main.nr`)
- `backend` (optional)
- `license` (optional)

#### Dependencies section

This is where you will specify any dependencies for your project.
See the [Dependencies page](../noir/modules_packages_crates/dependencies.md) for more info. + +`./proofs/` and `./contract/` directories will not be immediately visible until you create a proof or +verifier contract respectively. + +### main.nr + +The _main.nr_ file contains a `main` method, this method is the entry point into your Noir program. + +In our sample program, _main.nr_ looks like this: + +```rust +fn main(x : Field, y : Field) { + assert(x != y); +} +``` + +The parameters `x` and `y` can be seen as the API for the program and must be supplied by the +prover. Since neither `x` nor `y` is marked as public, the verifier does not supply any inputs, when +verifying the proof. + +The prover supplies the values for `x` and `y` in the _Prover.toml_ file. + +As for the program body, `assert` ensures the satisfaction of the condition (e.g. `x != y`) is +constrained by the proof of the execution of said program (i.e. if the condition was not met, the +verifier would reject the proof as an invalid proof). + +### Prover.toml + +The _Prover.toml_ file is a file which the prover uses to supply his witness values(both private and +public). + +In our hello world program the _Prover.toml_ file looks like this: + +```toml +x = "1" +y = "2" +``` + +When the command `nargo prove` is executed, two processes happen: + +1. Noir creates a proof that `x` which holds the value of `1` and `y` which holds the value of `2` + is not equal. This not equal constraint is due to the line `assert(x != y)`. + +2. Noir creates and stores the proof of this statement in the _proofs_ directory in a file called your-project.proof. So if your project is named "private_voting" (defined in the project Nargo.toml), the proof will be saved at `./proofs/private_voting.proof`. Opening this file will display the proof in hex format. + +#### Arrays of Structs + +The following code shows how to pass an array of structs to a Noir program to generate a proof. 
+

```rust
// main.nr
struct Foo {
    bar: Field,
    baz: Field,
}

fn main(foos: [Foo; 3]) -> pub Field {
    foos[2].bar + foos[2].baz
}
```

Prover.toml:

```toml
[[foos]] # foos[0]
bar = 0
baz = 0

[[foos]] # foos[1]
bar = 0
baz = 0

[[foos]] # foos[2]
bar = 1
baz = 2
```

#### Custom toml files

You can specify a `toml` file with a different name to use for proving by using the `--prover-name` or `-p` flags.

This command looks for proof inputs in the default **Prover.toml** and generates the proof and saves it at `./proofs/<project-name>.proof`:

```bash
nargo prove
```

This command looks for proof inputs in the custom **OtherProver.toml** and generates the proof and saves it at `./proofs/<project-name>.proof`:

```bash
nargo prove -p OtherProver
```

## Verifying a Proof

When the command `nargo verify` is executed, two processes happen:

1. Noir checks in the _proofs_ directory for a proof file with the project name (e.g. test_project.proof)

2. If that file is found, the proof's validity is checked

> **Note:** The validity of the proof is linked to the current Noir program; if the program is
> changed and the verifier verifies the proof, it will fail because the proof is not valid for the
> _modified_ Noir program.

In production, the prover and the verifier are usually two separate entities. A prover would
retrieve the necessary inputs, execute the Noir program, generate a proof and pass it to the
verifier. The verifier would then retrieve the public inputs from usually external sources and
verifies the validity of the proof against it.

Take a private asset transfer as an example:

A user in the browser as the prover would retrieve private inputs (e.g. the user's private key) and
public inputs (e.g. the user's encrypted balance on-chain), compute the transfer, generate a proof
and submit it to the verifier smart contract.
+ +The verifier contract would then draw the user's encrypted balance directly from the blockchain and +verify the proof submitted against it. If the verification passes, additional functions in the +verifier contract could trigger (e.g. approve the asset transfer). + +Now that you understand the concepts, you'll probably want some editor feedback while you are writing more complex code. diff --git a/docs/docs/explanations/_category_.json b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json similarity index 74% rename from docs/docs/explanations/_category_.json rename to docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json index 151ee204894..dff520ebc41 100644 --- a/docs/docs/explanations/_category_.json +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/_category_.json @@ -1,5 +1,6 @@ { "position": 3, + "label": "Tooling", "collapsible": true, "collapsed": true } diff --git a/docs/docs/getting_started/tooling/index.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md similarity index 100% rename from docs/docs/getting_started/tooling/index.md rename to docs/versioned_docs/version-v0.22.0/getting_started/tooling/index.md diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md new file mode 100644 index 00000000000..81e0356ef8a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/language_server.md @@ -0,0 +1,43 @@ +--- +title: Language Server +description: Learn about the Noir Language Server, how to install the components, and configuration that may be required. +keywords: [Nargo, Language Server, LSP, VSCode, Visual Studio Code] +sidebar_position: 0 +--- + +This section helps you install and configure the Noir Language Server. 
+ +The Language Server Protocol (LSP) has two components, the [Server](#language-server) and the [Client](#language-client). Below we describe each in the context of Noir. + +## Language Server + +The Server component is provided by the Nargo command line tool that you installed at the beginning of this guide. +As long as Nargo is installed and you've used it to run other commands in this guide, it should be good to go! + +If you'd like to verify that the `nargo lsp` command is available, you can run `nargo --help` and look for `lsp` in the list of commands. If you see it, you're using a version of Noir with LSP support. + +## Language Client + +The Client component is usually an editor plugin that launches the Server. It communicates LSP messages between the editor and the Server. For example, when you save a file, the Client will alert the Server, so it can try to compile the project and report any errors. + +Currently, Noir provides a Language Client for Visual Studio Code via the [vscode-noir](https://github.com/noir-lang/vscode-noir) extension. You can install it via the [Visual Studio Marketplace](https://marketplace.visualstudio.com/items?itemName=noir-lang.vscode-noir). + +> **Note:** Noir's Language Server Protocol support currently assumes users' VSCode workspace root to be the same as users' Noir project root (i.e. where Nargo.toml lies). +> +> If LSP features seem to be missing / malfunctioning, make sure you are opening your Noir project directly (instead of as a sub-folder) in your VSCode instance. 
+ +When your language server is running correctly and the VSCode plugin is installed, you should see handy codelens buttons for compilation, measuring circuit size, execution, and tests: + +![Compile and Execute](@site/static/img/codelens_compile_execute.png) +![Run test](@site/static/img/codelens_run_test.png) + +You should also see your tests in the `testing` panel: + +![Testing panel](@site/static/img/codelens_testing_panel.png) + +### Configuration + +- **Noir: Enable LSP** - If checked, the extension will launch the Language Server via `nargo lsp` and communicate with it. +- **Noir: Nargo Flags** - Additional flags may be specified if you require them to be added when the extension calls `nargo lsp`. +- **Noir: Nargo Path** - An absolute path to a Nargo binary with the `lsp` command. This may be useful if Nargo is not within the `PATH` of your editor. +- **Noir > Trace: Server** - Setting this to `"messages"` or `"verbose"` will log LSP messages between the Client and Server. Useful for debugging. diff --git a/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md new file mode 100644 index 00000000000..d3e0c522473 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/getting_started/tooling/testing.md @@ -0,0 +1,62 @@ +--- +title: Testing in Noir +description: Learn how to use Nargo to test your Noir program in a quick and easy way +keywords: [Nargo, testing, Noir, compile, test] +sidebar_position: 1 +--- + +You can test your Noir programs using Noir circuits. + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. 
+ +For example if you have a program like: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test] +fn test_add() { + assert(add(2,2) == 4); + assert(add(0,1) == 1); + assert(add(1,0) == 1); +} +``` + +Running `nargo test` will test that the `test_add` function can be executed while satisfying all +the constraints which allows you to test that add returns the expected values. Test functions can't +have any arguments currently. + +### Test fail + +You can write tests that are expected to fail by using the decorator `#[test(should_fail)]`. For example: + +```rust +fn add(x: u64, y: u64) -> u64 { + x + y +} +#[test(should_fail)] +fn test_add() { + assert(add(2,2) == 5); +} +``` + +You can be more specific and make it fail with a specific reason by using `should_fail_with = "`: + +```rust +fn main(african_swallow_avg_speed : Field) { + assert(african_swallow_avg_speed == 65, "What is the airspeed velocity of an unladen swallow"); +} + +#[test] +fn test_king_arthur() { + main(65); +} + +#[test(should_fail_with = "What is the airspeed velocity of an unladen swallow")] +fn test_bridgekeeper() { + main(32); +} + +``` diff --git a/docs/versioned_docs/version-v0.22.0/how_to/_category_.json b/docs/versioned_docs/version-v0.22.0/how_to/_category_.json new file mode 100644 index 00000000000..23b560f610b --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md b/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md new file mode 100644 index 00000000000..2f7be604401 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/how-to-recursion.md @@ -0,0 +1,184 @@ +--- +title: How to use recursion on NoirJS +description: Learn how to implement recursion with NoirJS, a powerful tool for creating smart contracts on the EVM blockchain. 
This guide assumes familiarity with NoirJS, solidity verifiers, and the Barretenberg proving backend. Discover how to generate both final and intermediate proofs using `noir_js` and `backend_barretenberg`. +keywords: + [ + "NoirJS", + "EVM blockchain", + "smart contracts", + "recursion", + "solidity verifiers", + "Barretenberg backend", + "noir_js", + "backend_barretenberg", + "intermediate proofs", + "final proofs", + "nargo compile", + "json import", + "recursive circuit", + "recursive app" + ] +sidebar_position: 1 +--- + +This guide shows you how to use recursive proofs in your NoirJS app. For the sake of clarity, it is assumed that: + +- You already have a NoirJS app. If you don't, please visit the [NoirJS tutorial](../tutorials/noirjs_app.md) and the [reference](../reference/NoirJS/noir_js/index.md). +- You are familiar with what are recursive proofs and you have read the [recursion explainer](../explainers/explainer-recursion.md) +- You already built a recursive circuit following [the reference](../noir/standard_library/recursion.md), and understand how it works. + +It is also assumed that you're not using `noir_wasm` for compilation, and instead you've used [`nargo compile`](../reference/nargo_commands.md) to generate the `json` you're now importing into your project. However, the guide should work just the same if you're using `noir_wasm`. + +:::info + +As you've read in the [explainer](../explainers/explainer-recursion.md), a recursive proof is an intermediate proof. Meaning it doesn't necessarily generate the final step that makes it verifiable in a smart contract. However, it is easy to verify within another circuit. + +While "standard" usage of NoirJS packages abstracts final proofs, it currently lacks the necessary interface to abstract away intermediate proofs. Which means these proofs need to be created by using the backend directly. 
+ +In short: + +- `noir_js` generates *only* final proofs +- `backend_barretenberg` generates both types of proofs + +::: + +In a standard recursive app, you're also dealing with at least two circuits. For the purpose of this guide, we will assume these two: + +- `main`: a circuit of type `assert(x != y)` +- `recursive`: a circuit that verifies `main` + +For a full example on how recursive proofs work, please refer to the [noir-examples](https://github.com/noir-lang/noir-examples) repository. We will *not* be using it as a reference for this guide. + +## Step 1: Setup + +In a common NoirJS app, you need to instantiate a backend with something like `const backend = new Backend(circuit)`. Then you feed it to the `noir_js` interface. + +For recursion, this doesn't happen, and the only need for `noir_js` is to `execute` a circuit and get its witness and return value. Everything else is not interfaced, so it needs to happen on the `backend` object. + +It is also recommended that you instantiate the backend with as many threads as possible, to allow for maximum concurrency: + +```js +const backend = new Backend(circuit, { threads: 8 }) +``` + +:::tip +You can use the [`os.cpus()`](https://nodejs.org/api/os.html#oscpus) object in `nodejs` or [`navigator.hardwareConcurrency`](https://developer.mozilla.org/en-US/docs/Web/API/Navigator/hardwareConcurrency) on the browser to make the most out of those glorious cpu cores +::: + +## Step 2: Generating the witness and the proof for `main` + +After instantiating the backend, you should also instantiate `noir_js`. We will use it to execute the circuit and get the witness. 
+ +```js +const noir = new Noir(circuit, backend) +const { witness } = noir.execute(input) +``` + +With this witness, you are now able to generate the intermediate proof for the main circuit: + +```js +const { proof, publicInputs } = await backend.generateIntermediateProof(witness) +``` + +:::warning + +Always keep in mind what is actually happening on your development process, otherwise you'll quickly become confused about what circuit are we actually running and why! + +In this case, you can imagine that Alice (running the `main` circuit) is proving something to Bob (running the `recursive` circuit), and Bob is verifying her proof within his proof. + +With this in mind, it becomes clear that our intermediate proof is the one *meant to be verified within another circuit*. So it is Alice's. Actually, the only final proof in this theoretical scenario would be the last one, sent on-chain. + +::: + +## Step 3 - Verification and proof artifacts + +Optionally, you are able to verify the intermediate proof: + +```js +const verified = await backend.verifyIntermediateProof({ proof, publicInputs }) +``` + +This can be useful to make sure our intermediate proof was correctly generated. But the real goal is to do it within another circuit. For that, we need to generate the intermediate artifacts: + +```js +const { proofAsFields, vkAsFields, vkHash } = await backend.generateIntermediateProofArtifacts( { publicInputs, proof }, publicInputsCount) +``` + +This call takes the public inputs and the proof, but also the public inputs count. While this is easily retrievable by simply counting the `publicInputs` length, the backend interface doesn't currently abstract it away. + +:::info + +The `proofAsFields` has a constant size `[Field; 93]`. However, currently the backend doesn't remove the public inputs from the proof when converting it. 
+ +This means that if your `main` circuit has two public inputs, then you should also modify the recursive circuit to accept a proof with the public inputs appended. In our example, since `y` is a public input, our `proofAsFields` is of type `[Field; 94]`. + +Verification keys in Barretenberg are always of size 114. + +::: + +:::warning + +One common mistake is to forget *who* makes this call. + +In a situation where Alice is generating the `main` proof, if she generates the proof artifacts and sends them to Bob, who gladly takes them as true, this would mean Alice could prove anything! + +Instead, Bob needs to make sure *he* extracts the proof artifacts, using his own instance of the `main` circuit backend. This way, Alice has to provide a valid proof for the correct `main` circuit. + +::: + +## Step 4 - Recursive proof generation + +With the artifacts, generating a recursive proof is no different from a normal proof. You simply use the `backend` (with the recursive circuit) to generate it: + +```js +const recursiveInputs = { + verification_key: vkAsFields, // array of length 114 + proof: proofAsFields, // array of length 93 + size of public inputs + publicInputs: [mainInput.y], // using the example above, where `y` is the only public input + key_hash: vkHash, + input_aggregation_object: Array(16).fill(0) // this circuit is verifying a non-recursive proof, so there's no input aggregation object: just use zero +} + +const { witness, returnValue } = noir.execute(recursiveInputs) // we're executing the recursive circuit now! +const { proof, publicInputs } = backend.generateFinalProof(witness) +const verified = backend.verifyFinalProof({ proof, publicInputs }) +``` + +You can obviously chain this proof into another proof. In fact, if you're using recursive proofs, you're probably interested in using them this way! In that case, you should keep in mind the `returnValue`, as it will contain the `input_aggregation_object` for the next proof. 
+ +:::tip + +Managing circuits and "who does what" can be confusing. To make sure your naming is consistent, you can keep them in an object. For example: + +```js +const circuits = { +main: mainJSON, +recursive: recursiveJSON +} +const backends = { +main: new BarretenbergBackend(circuits.main), +recursive: new BarretenbergBackend(circuits.recursive) +} +const noir_programs = { +main: new Noir(circuits.main, backends.main), +recursive: new Noir(circuits.recursive, backends.recursive) +} +``` + +This allows you to neatly call exactly the method you want without conflicting names: + +```js +// Alice runs this 👇 +const { witness: mainWitness } = await noir_programs.main.execute(input) +const proof = await backends.main.generateIntermediateProof(mainWitness) + +// Bob runs this 👇 +const verified = await backends.main.verifyIntermediateProof(proof) +const { proofAsFields, vkAsFields, vkHash } = await backends.main.generateIntermediateProofArtifacts( + proof, + numPublicInputs, +); +const recursiveProof = await noir_programs.recursive.generateFinalProof(recursiveInputs) +``` + +::: diff --git a/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx b/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx new file mode 100644 index 00000000000..34074659ac1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/merkle-proof.mdx @@ -0,0 +1,48 @@ +--- +title: Prove Merkle Tree Membership +description: + Learn how to use merkle membership proof in Noir to prove that a given leaf is a member of a + merkle tree with a specified root, at a given index. +keywords: + [merkle proof, merkle membership proof, Noir, rust, hash function, Pedersen, sha256, merkle tree] +--- + +Let's walk through an example of a merkle membership proof in Noir that proves that a given leaf is +in a merkle tree. 
+ +```rust +use dep::std; + +fn main(message : [Field; 62], index : Field, hashpath : [Field; 40], root : Field) { + let leaf = std::hash::hash_to_field(message); + let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); + assert(merkle_root == root); +} + +``` + +The message is hashed using `hash_to_field`. The specific hash function that is being used is chosen +by the backend. The only requirement is that this hash function can heuristically be used as a +random oracle. If only collision resistance is needed, then one can call `std::hash::pedersen_hash` +instead. + +```rust +let leaf = std::hash::hash_to_field(message); +``` + +The leaf is then passed to a compute_merkle_root function with the root, index and hashpath. The returned root can then be asserted to be the same as the provided root. + +```rust +let merkle_root = std::merkle::compute_merkle_root(leaf, index, hashpath); +assert (merkle_root == root); +``` + +> **Note:** It is possible to re-implement the merkle tree implementation without standard library. +> However, for most usecases, it is enough. In general, the standard library will always opt to be +> as conservative as possible, while striking a balance with efficiency. + +An example, the merkle membership proof, only requires a hash function that has collision +resistance, hence a hash function like Pedersen is allowed, which in most cases is more efficient +than the even more conservative sha256. 
+ +[View an example on the starter repo](https://github.com/noir-lang/noir-examples/blob/3ea09545cabfa464124ec2f3ea8e60c608abe6df/stealthdrop/circuits/src/main.nr#L20) diff --git a/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md b/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md new file mode 100644 index 00000000000..8022b0e5f20 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/how_to/solidity_verifier.md @@ -0,0 +1,130 @@ +--- +title: Generate a Solidity Verifier +description: + Learn how to run the verifier as a smart contract on the blockchain. Compile a Solidity verifier + contract for your Noir program and deploy it on any EVM blockchain acting as a verifier smart + contract. Read more to find out +keywords: + [ + solidity verifier, + smart contract, + blockchain, + compiler, + plonk_vk.sol, + EVM blockchain, + verifying Noir programs, + proving backend, + Barretenberg, + ] +sidebar_position: 0 +--- + +For certain applications, it may be desirable to run the verifier as a smart contract instead of on +a local machine. + +Compile a Solidity verifier contract for your Noir program by running: + +```sh +nargo codegen-verifier +``` + +A new `contract` folder would then be generated in your project directory, containing the Solidity +file `plonk_vk.sol`. It can be deployed on any EVM blockchain acting as a verifier smart contract. + +> **Note:** It is possible to compile verifier contracts of Noir programs for other smart contract +> platforms as long as the proving backend supplies an implementation. +> +> Barretenberg, the default proving backend for Nargo, supports compilation of verifier contracts in +> Solidity only for the time being. 
+ +## Verify + +To verify a proof using the Solidity verifier contract, call the `verify` function with the +following signature: + +```solidity +function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) +``` + +You can see an example of how the `verify` function is called in the example zk voting application [here](https://github.com/noir-lang/noir-examples/blob/33e598c257e2402ea3a6b68dd4c5ad492bce1b0a/foundry-voting/src/zkVote.sol#L35): + +```solidity +function castVote(bytes calldata proof, uint proposalId, uint vote, bytes32 nullifierHash) public returns (bool) { + // ... + bytes32[] memory publicInputs = new bytes32[](4); + publicInputs[0] = merkleRoot; + publicInputs[1] = bytes32(proposalId); + publicInputs[2] = bytes32(vote); + publicInputs[3] = nullifierHash; + require(verifier.verify(proof, publicInputs), "Invalid proof"); +``` + +### Public Inputs + +:::tip + +A circuit doesn't have the concept of a return value. Return values are just syntactic sugar in +Noir. + +Under the hood, the return value is passed as an input to the circuit and is checked at the end of +the circuit program. + +::: + +The verifier contract uses the output (return) value of a Noir program as a public input. So if you +have the following function + +```rust +fn main( + // Public inputs + pubkey_x: pub Field, + pubkey_y: pub Field, + // Private inputs + priv_key: Field, +) -> pub Field +``` + +then `verify` in `plonk_vk.sol` will expect 3 public inputs. Passing two inputs will result in an +error like `Reason: PUBLIC_INPUT_COUNT_INVALID(3, 2)`. + +In this case the 3 inputs to `verify` would be ordered as `[pubkey_x, pubkey_y, return]`. + +#### Struct inputs + +Consider the following program: + +```rust +struct Type1 { + val1: Field, + val2: Field, +} + +struct Nested { + t1: Type1, + is_true: bool, +} + +fn main(x: pub Field, nested: pub Nested, y: pub Field) { + //... 
+} +``` + +Structs will be flattened so that the array of inputs is 1-dimensional array. The order of these inputs would be flattened to: `[x, nested.t1.val1, nested.t1.val2, nested.is_true, y]` + +## Noir for EVM chains + +You can currently deploy the Solidity verifier contracts to most EVM compatible chains. EVM chains that have been tested and are known to work include: + +- Optimism +- Arbitrum +- Polygon PoS +- Scroll +- Celo + +Other EVM chains should work, but have not been tested directly by our team. If you test any other chains, please open a PR on this page to update the list. See [this doc](https://github.com/noir-lang/noir-starter/tree/main/with-foundry#testing-on-chain) for more info about testing verifier contracts on different EVM chains. + +### Unsupported chains + +Unfortunately not all "EVM" chains are supported. + +**zkSync** and the **Polygon zkEVM** do _not_ currently support proof verification via Solidity verifier contracts. They are missing the bn256 precompile contract that the verifier contract requires. Once these chains support this precompile, they may work. diff --git a/docs/versioned_docs/version-v0.22.0/index.md b/docs/versioned_docs/version-v0.22.0/index.md new file mode 100644 index 00000000000..eaf8c59f935 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/index.md @@ -0,0 +1,84 @@ +--- +title: Noir +description: + Learn about the public alpha release of Noir, a domain specific language heavily influenced by Rust that compiles to + an intermediate language which can be compiled to an arithmetic circuit or a rank-1 constraint system. +keywords: + [ + Noir, + Domain Specific Language, + Rust, + Intermediate Language, + Arithmetic Circuit, + Rank-1 Constraint System, + Ethereum Developers, + Protocol Developers, + Blockchain Developers, + Proving System, + Smart Contract Language, + ] +sidebar_position: 0 +--- + +## What's new about Noir? 
+ +Noir, a domain-specific language crafted for SNARK proving systems, stands out with its simplicity, flexibility, +and robust capabilities. Unlike conventional approaches that compile directly to a fixed NP-complete language, +Noir takes a two-pronged path. It first compiles to an adaptable intermediate language known as ACIR. From there, +depending on the project's needs, ACIR can be further compiled into an arithmetic circuit for integration with Aztec's +barretenberg backend or transformed into a rank-1 constraint system suitable for R1CS backends like Arkworks' Marlin +backend, among others. + +This innovative design introduces unique challenges, yet it strategically separates the programming language from the +backend. Noir's approach echoes the modular philosophy of LLVM, offering developers a versatile toolkit for cryptographic +programming. + +## Who is Noir for? + +### Solidity Developers + +Noir streamlines the creation of Solidity contracts that interface with SNARK systems. +Utilize the [`nargo codegen-verifier`](./reference/nargo_commands.md#nargo-codegen-verifier) command to construct verifier +contracts efficiently. While the current alpha version offers this as a direct feature, future updates aim +to modularize this process for even greater ease of use. + +Noir currently includes a command to create a Solidity contract which verifies your Noir program. This will be +modularized in the future; however, as of the alpha, you can use the `nargo codegen-verifier` command to create a verifier contract. + +### Protocol Developers + +Should the Aztec backend not align with your existing tech stack, or if you're inclined to integrate alternative +proving systems, Noir's compilation to a proof-agnostic intermediate language offers unmatched flexibility. +This allows protocol engineers the freedom to substitute the default PLONK-based system with an alternative of their +choice, tailoring the proving system to their specific needs. 
+ +### Blockchain developers + +Blockchain developers often face environmental constraints, such as predetermined proving systems and smart contract +languages. Noir addresses this by enabling the implementation of custom proving system backends and smart contract +interfaces, ensuring seamless integration with your blockchain's architecture, and expanding the horizons for innovation +within your projects. + +## Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the +[awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). + +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains + the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage + proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing + for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and + return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of + sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data + type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, + allowing results that aren't whole numbers + +See the section on [dependencies](noir/modules_packages_crates/dependencies.md) for 
more information. diff --git a/docs/versioned_docs/version-v0.22.0/migration_notes.md b/docs/versioned_docs/version-v0.22.0/migration_notes.md new file mode 100644 index 00000000000..184ca283539 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/migration_notes.md @@ -0,0 +1,91 @@ +--- +title: Migration notes +description: Read about migration notes from previous versions, which could solve problems while updating +keywords: [Noir, notes, migration, updating, upgrading] +--- + +Noir is in full-speed development. Things break fast, wild, and often. This page attempts to leave some notes on errors you might encounter when upgrading and how to resolve them until proper patches are built. + +## ≥0.19 + +### Enforcing `compiler_version` + +From this version on, the compiler will check for the `compiler_version` field in `Nargo.toml`, and will error if it doesn't match the current Nargo version in use. + +To update, please make sure this field in `Nargo.toml` matches the output of `nargo --version`. + +## ≥0.14 + +The index of the [for loops](noir/syntax/control_flow.md#loops) is now of type `u64` instead of `Field`. An example refactor would be: + +```rust +for i in 0..10 { + let i = i as Field; +} +``` + +## ≥v0.11.0 and Nargo backend + +From this version onwards, Nargo starts managing backends through the `nargo backend` command. Upgrading to the versions per usual steps might lead to: + +### `backend encountered an error` + +This is likely due to the existing locally installed version of proving backend (e.g. barretenberg) is incompatible with the version of Nargo in use. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. 
+ +If you are using the default barretenberg backend, simply run: + +``` +nargo prove +``` + +with your Noir program. + +This will trigger the download and installation of the latest version of barretenberg compatible with your Nargo in use. + +### `backend encountered an error: illegal instruction` + +On certain Intel-based systems, an `illegal instruction` error may arise due to incompatibility of barretenberg with certain CPU instructions. + +To fix the issue: + +1. Uninstall the existing backend + +```bash +nargo backend uninstall acvm-backend-barretenberg +``` + +You may replace _acvm-backend-barretenberg_ with the name of your backend listed in `nargo backend ls` or in ~/.nargo/backends. + +2. Reinstall a compatible version of the proving backend. + +If you are using the default barretenberg backend, simply run: + +``` +nargo backend install acvm-backend-barretenberg https://github.com/noir-lang/barretenberg-js-binary/raw/master/run-bb.tar.gz +``` + +This downloads and installs a specific bb.js based version of barretenberg binary from GitHub. + +The gzipped file is running [this bash script](https://github.com/noir-lang/barretenberg-js-binary/blob/master/run-bb-js.sh), where we need to gzip it as the Nargo currently expect the backend to be zipped up. + +Then run: + +``` +DESIRED_BINARY_VERSION=0.8.1 nargo info +``` + +This overrides the bb native binary with a bb.js node application instead, which should be compatible with most if not all hardware. This does come with the drawback of being generally slower than native binary. + +0.8.1 indicates bb.js version 0.8.1, so if you change that it will update to a different version or the default version in the script if none was supplied. 
diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json new file mode 100644 index 00000000000..1debcfe7675 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Modules, Packages and Crates", + "position": 2, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md new file mode 100644 index 00000000000..760a463094c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/crates_and_packages.md @@ -0,0 +1,43 @@ +--- +title: Crates and Packages +description: Learn how to use Crates and Packages in your Noir project +keywords: [Nargo, dependencies, package management, crates, package] +sidebar_position: 0 +--- + +## Crates + +A crate is the smallest amount of code that the Noir compiler considers at a time. +Crates can contain modules, and the modules may be defined in other files that get compiled with the crate, as we’ll see in the coming sections. + +### Crate Types + +A Noir crate can come in several forms: binaries, libraries or contracts. + +#### Binaries + +_Binary crates_ are programs which you can compile to an ACIR circuit which you can then create proofs against. Each must have a function called `main` that defines the ACIR circuit which is to be proved. + +#### Libraries + +_Library crates_ don't have a `main` function and they don't compile down to ACIR. Instead they define functionality intended to be shared with multiple projects, and eventually included in a binary crate. + +#### Contracts + +Contract crates are similar to binary crates in that they compile to ACIR which you can create proofs against. 
They are different in that they do not have a single `main` function, but are a collection of functions to be deployed to the [Aztec network](https://aztec.network). You can learn more about the technical details of Aztec in the [monorepo](https://github.com/AztecProtocol/aztec-packages) or contract [examples](https://github.com/AztecProtocol/aztec-packages/tree/master/yarn-project/noir-contracts/contracts). + +### Crate Root + +Every crate has a root, which is the source file from which the compiler starts; this is also known as the root module. The Noir compiler does not enforce any conditions on the name of the file which is the crate root, however if you are compiling via Nargo the crate root must be called `lib.nr` or `main.nr` for library or binary crates respectively. + +## Packages + +A Nargo _package_ is a collection of one or more crates that provides a set of functionality. A package must include a Nargo.toml file. + +A package _must_ contain either a library or a binary crate, but not both. + +### Differences from Cargo Packages + +One notable difference between Rust's Cargo and Noir's Nargo is that while Cargo allows a package to contain an unlimited number of binary crates and a single library crate, Nargo currently only allows a package to contain a single crate. + +In future this restriction may be lifted to allow a Nargo package to contain both a binary and library crate or multiple binary crates. diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md new file mode 100644 index 00000000000..a37dc401b7d --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/dependencies.md @@ -0,0 +1,124 @@ +--- +title: Dependencies +description: + Learn how to specify and manage dependencies in Nargo, allowing you to upload packages to GitHub + and use them easily in your project. 
+keywords: [Nargo, dependencies, GitHub, package management, versioning] +sidebar_position: 1 +--- + +Nargo allows you to upload packages to GitHub and use them as dependencies. + +## Specifying a dependency + +Specifying a dependency requires a tag to a specific commit and the git url to the url containing +the package. + +Currently, there are no requirements on the tag contents. If requirements are added, it would follow +semver 2.0 guidelines. + +> Note: Without a `tag` , there would be no versioning and dependencies would change each time you +> compile your project. + +For example, to add the [ecrecover-noir library](https://github.com/colinnielsen/ecrecover-noir) to your project, add it to `Nargo.toml`: + +```toml +# Nargo.toml + +[dependencies] +ecrecover = {tag = "v0.8.0", git = "https://github.com/colinnielsen/ecrecover-noir"} +``` + +If the module is in a subdirectory, you can define a subdirectory in your git repository, for example: + +```toml +# Nargo.toml + +[dependencies] +easy_private_token_contract = {tag ="v0.1.0-alpha62", git = "https://github.com/AztecProtocol/aztec-packages", directory = "yarn-project/noir-contracts/contracts/easy_private_token_contract"} +``` + +## Specifying a local dependency + +You can also specify dependencies that are local to your machine. + +For example, this file structure has a library and binary crate + +```tree +├── binary_crate +│   ├── Nargo.toml +│   └── src +│   └── main.nr +└── lib_a + ├── Nargo.toml + └── src + └── lib.nr +``` + +Inside of the binary crate, you can specify: + +```toml +# Nargo.toml + +[dependencies] +lib_a = { path = "../lib_a" } +``` + +## Importing dependencies + +You can import a dependency to a Noir file using the following syntax. 
For example, to import the +ecrecover-noir library and local lib_a referenced above: + +```rust +use dep::ecrecover; +use dep::lib_a; +``` + +You can also import only the specific parts of dependency that you want to use, like so: + +```rust +use dep::std::hash::sha256; +use dep::std::scalar_mul::fixed_base_embedded_curve; +``` + +Lastly, as demonstrated in the +[elliptic curve example](../standard_library/cryptographic_primitives/ec_primitives#examples), you +can import multiple items in the same line by enclosing them in curly braces: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; +``` + +We don't have a way to consume libraries from inside a [workspace](./workspaces) as external dependencies right now. + +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. + +## Dependencies of Dependencies + +Note that when you import a dependency, you also get access to all of the dependencies of that package. + +For example, the [phy_vector](https://github.com/resurgencelabs/phy_vector) library imports an [fraction](https://github.com/resurgencelabs/fraction) library. If you're importing the phy_vector library, then you can access the functions in fractions library like so: + +```rust +use dep::phy_vector; + +fn main(x : Field, y : pub Field) { + //... + let f = phy_vector::fraction::toFraction(true, 2, 1); + //... +} +``` + +## Available Libraries + +Noir does not currently have an official package manager. You can find a list of available Noir libraries in the [awesome-noir repo here](https://github.com/noir-lang/awesome-noir#libraries). 
+ +Some libraries that are available today include: + +- [Standard Library](https://github.com/noir-lang/noir/tree/master/noir_stdlib) - the Noir Standard Library +- [Ethereum Storage Proof Verification](https://github.com/aragonzkresearch/noir-trie-proofs) - a library that contains the primitives necessary for RLP decoding (in the form of look-up table construction) and Ethereum state and storage proof verification (or verification of any trie proof involving 32-byte long keys) +- [BigInt](https://github.com/shuklaayush/noir-bigint) - a library that provides a custom BigUint56 data type, allowing for computations on large unsigned integers +- [ECrecover](https://github.com/colinnielsen/ecrecover-noir/tree/main) - a library to verify an ECDSA signature and return the source Ethereum address +- [Sparse Merkle Tree Verifier](https://github.com/vocdoni/smtverifier-noir/tree/main) - a library for verification of sparse Merkle trees +- [Signed Int](https://github.com/resurgencelabs/signed_int) - a library for accessing a custom Signed Integer data type, allowing access to negative numbers on Noir +- [Fraction](https://github.com/resurgencelabs/fraction) - a library for accessing fractional number data type in Noir, allowing results that aren't whole numbers diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md new file mode 100644 index 00000000000..ae822a1cff4 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/modules.md @@ -0,0 +1,105 @@ +--- +title: Modules +description: + Learn how to organize your files using modules in Noir, following the same convention as Rust's + module system. Examples included. +keywords: [Noir, Rust, modules, organizing files, sub-modules] +sidebar_position: 2 +--- + +Noir's module system follows the same convention as the _newer_ version of Rust's module system. 
+ +## Purpose of Modules + +Modules are used to organize files. Without modules all of your code would need to live in a single +file. In Noir, the compiler does not automatically scan all of your files to detect modules. This +must be done explicitly by the developer. + +## Examples + +### Importing a module in the crate root + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::hello_world(); +} +``` + +Filename : `src/foo.nr` + +```rust +fn from_foo() {} +``` + +In the above snippet, the crate root is the `src/main.nr` file. The compiler sees the module +declaration `mod foo` which prompts it to look for a foo.nr file. + +Visually this module hierarchy looks like the following : + +``` +crate + ├── main + │ + └── foo + └── from_foo + +``` + +### Importing a module throughout the tree + +All modules are accessible from the `crate::` namespace. + +``` +crate + ├── bar + ├── foo + └── main + +``` + +In the above snippet, if `bar` would like to use functions in `foo`, it can do so by `use crate::foo::function_name`. + +### Sub-modules + +Filename : `src/main.nr` + +```rust +mod foo; + +fn main() { + foo::from_foo(); +} +``` + +Filename : `src/foo.nr` + +```rust +mod bar; +fn from_foo() {} +``` + +Filename : `src/foo/bar.nr` + +```rust +fn from_bar() {} +``` + +In the above snippet, we have added an extra module to the module tree; `bar`. `bar` is a submodule +of `foo` hence we declare bar in `foo.nr` with `mod bar`. 
Since `foo` is not the crate root, the +compiler looks for the file associated with the `bar` module in `src/foo/bar.nr` + +Visually the module hierarchy looks as follows: + +``` +crate + ├── main + │ + └── foo + ├── from_foo + └── bar + └── from_bar +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md new file mode 100644 index 00000000000..67a1dafa372 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/modules_packages_crates/workspaces.md @@ -0,0 +1,40 @@ +--- +title: Workspaces +sidebar_position: 3 +--- + +Workspaces are a feature of nargo that allow you to manage multiple related Noir packages in a single repository. A workspace is essentially a group of related projects that share common build output directories and configurations. + +Each Noir project (with its own Nargo.toml file) can be thought of as a package. Each package is expected to contain exactly one "named circuit", being the "name" defined in Nargo.toml with the program logic defined in `./src/main.nr`. + +For a project with the following structure: + +```tree +├── crates
│   ├── a
│   │   ├── Nargo.toml
│   │   └── src
│   │   └── main.nr
│   └── b
│   ├── Nargo.toml
│   └── src
│   └── main.nr
├── Nargo.toml
└── Prover.toml
``` + +You can define a workspace in Nargo.toml like so: + +```toml +[workspace] +members = ["crates/a", "crates/b"] +default-member = "crates/a" +``` + +`members` indicates which packages are included in the workspace. As such, all member packages of a workspace will be processed when the `--workspace` flag is used with various commands or if a `default-member` is not specified. + +`default-member` indicates which package various commands process by default. + +Libraries can be defined in a workspace. Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. 
+ +Inside a workspace, these are consumed as `{ path = "../to_lib" }` dependencies in Nargo.toml. diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json new file mode 100644 index 00000000000..af04c0933fd --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Standard Library", + "position": 1, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/explanations/standard_library/black_box_fns.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md similarity index 96% rename from docs/docs/explanations/standard_library/black_box_fns.md rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md index 1dfabfe8f22..e0c6d475c1f 100644 --- a/docs/docs/explanations/standard_library/black_box_fns.md +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/black_box_fns.md @@ -43,4 +43,4 @@ Here is a list of the current black box functions that are supported by UltraPlo Most black box functions are included as part of the Noir standard library, however `AND`, `XOR` and `RANGE` are used as part of the Noir language syntax. For instance, using the bitwise operator `&` will invoke the `AND` black box function. To ensure compatibility across backends, the ACVM has fallback implementations of `AND`, `XOR` and `RANGE` defined in its standard library which it can seamlessly fallback to if the backend doesn't support them. -You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/acvm/blob/acir-v0.12.0/acir/src/circuit/black_box_functions.rs). +You can view the black box functions defined in the ACVM code [here](https://github.com/noir-lang/noir/blob/master/acvm-repo/acir/src/circuit/black_box_functions.rs). 
diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md new file mode 100644 index 00000000000..d2b42d67b7c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ec_primitives.md @@ -0,0 +1,102 @@ +--- +title: Elliptic Curve Primitives +keywords: [cryptographic primitives, Noir project] +sidebar_position: 4 +--- + +Data structures and methods on them that allow you to carry out computations involving elliptic +curves over the (mathematical) field corresponding to `Field`. For the field currently at our +disposal, applications would involve a curve embedded in BN254, e.g. the +[Baby Jubjub curve](https://eips.ethereum.org/EIPS/eip-2494). + +## Data structures + +### Elliptic curve configurations + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Curve`), i.e. the specific elliptic +curve you want to use, which would be specified using any one of the methods +`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::new` which take the coefficients in the +defining equation together with a generator point as parameters. 
You can find more detail in the +comments in +[`noir_stdlib/src/ec.nr`](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr), but +the gist of it is that the elliptic curves of interest are usually expressed in one of the standard +forms implemented here (Twisted Edwards, Montgomery and Short Weierstraß), and in addition to that, +you could choose to use `affine` coordinates (Cartesian coordinates - the usual (x,y) - possibly +together with a point at infinity) or `curvegroup` coordinates (some form of projective coordinates +requiring more coordinates but allowing for more efficient implementations of elliptic curve +operations). Conversions between all of these forms are provided, and under the hood these +conversions are done whenever an operation is more efficient in a different representation (or a +mixed coordinate representation is employed). + +### Points + +(`std::ec::{tecurve,montcurve,swcurve}::{affine,curvegroup}::Point`), i.e. points lying on the +elliptic curve. For a curve configuration `c` and a point `p`, it may be checked that `p` +does indeed lie on `c` by calling `c.contains(p1)`. + +## Methods + +(given a choice of curve representation, e.g. use `std::ec::tecurve::affine::Curve` and use +`std::ec::tecurve::affine::Point`) + +- The **zero element** is given by `Point::zero()`, and we can verify whether a point `p: Point` is + zero by calling `p.is_zero()`. +- **Equality**: Points `p1: Point` and `p2: Point` may be checked for equality by calling + `p1.eq(p2)`. +- **Addition**: For `c: Curve` and points `p1: Point` and `p2: Point` on the curve, adding these two + points is accomplished by calling `c.add(p1,p2)`. +- **Negation**: For a point `p: Point`, `p.negate()` is its negation. +- **Subtraction**: For `c` and `p1`, `p2` as above, subtracting `p2` from `p1` is accomplished by + calling `c.subtract(p1,p2)`. 
+- **Scalar multiplication**: For `c` as above, `p: Point` a point on the curve and `n: Field`, + scalar multiplication is given by `c.mul(n,p)`. If instead `n :: [u1; N]`, i.e. `n` is a bit + array, the `bit_mul` method may be used instead: `c.bit_mul(n,p)` +- **Multi-scalar multiplication**: For `c` as above and arrays `n: [Field; N]` and `p: [Point; N]`, + multi-scalar multiplication is given by `c.msm(n,p)`. +- **Coordinate representation conversions**: The `into_group` method converts a point or curve + configuration in the affine representation to one in the CurveGroup representation, and + `into_affine` goes in the other direction. +- **Curve representation conversions**: `tecurve` and `montcurve` curves and points are equivalent + and may be converted between one another by calling `into_montcurve` or `into_tecurve` on their + configurations or points. `swcurve` is more general and a curve c of one of the other two types + may be converted to this representation by calling `c.into_swcurve()`, whereas a point `p` lying + on the curve given by `c` may be mapped to its corresponding `swcurve` point by calling + `c.map_into_swcurve(p)`. +- **Map-to-curve methods**: The Elligator 2 method of mapping a field element `n: Field` into a + `tecurve` or `montcurve` with configuration `c` may be called as `c.elligator2_map(n)`. For all of + the curve configurations, the SWU map-to-curve method may be called as `c.swu_map(z,n)`, where + `z: Field` depends on `Field` and `c` and must be chosen by the user (the conditions it needs to + satisfy are specified in the comments + [here](https://github.com/noir-lang/noir/blob/master/noir_stdlib/src/ec.nr)). + +## Examples + +The +[ec_baby_jubjub test](https://github.com/noir-lang/noir/blob/master/test_programs/compile_success_empty/ec_baby_jubjub/src/main.nr) +illustrates all of the above primitives on various forms of the Baby Jubjub curve. 
A couple of more +interesting examples in Noir would be: + +Public-key cryptography: Given an elliptic curve and a 'base point' on it, determine the public key +from the private key. This is a matter of using scalar multiplication. In the case of Baby Jubjub, +for example, this code would do: + +```rust +use dep::std::ec::tecurve::affine::{Curve, Point}; + +fn bjj_pub_key(priv_key: Field) -> Point +{ + + let bjj = Curve::new(168700, 168696, G::new(995203441582195749578291179787384436505546430278305826713579947235728471134,5472060717959818805561601436314318772137091100104008585924551046643952123905)); + + let base_pt = Point::new(5299619240641551281634865583518297030282874472190772894086521144482721001553, 16950150798460657717958625567821834550301663161624707787222815936182638968203); + + bjj.mul(priv_key,base_pt) +} +``` + +This would come in handy in a Merkle proof. + +- EdDSA signature verification: This is a matter of combining these primitives with a suitable hash + function. See + [feat(stdlib): EdDSA sig verification noir#1136](https://github.com/noir-lang/noir/pull/1136) for + the case of Baby Jubjub and the Poseidon hash function. 
diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx new file mode 100644 index 00000000000..1376c51dfde --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/ecdsa_sig_verification.mdx @@ -0,0 +1,46 @@ +--- +title: ECDSA Signature Verification +description: Learn about the cryptographic primitives regarding ECDSA over the secp256k1 and secp256r1 curves +keywords: [cryptographic primitives, Noir project, ecdsa, secp256k1, secp256r1, signatures] +sidebar_position: 3 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +Noir supports ECDSA signatures verification over the secp256k1 and secp256r1 curves. + +## ecdsa_secp256k1::verify_signature + +Verifier for ECDSA Secp256k1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + +## ecdsa_secp256r1::verify_signature + +Verifier for ECDSA Secp256r1 signatures + +```rust +fn verify_signature(_public_key_x : [u8; 32], _public_key_y : [u8; 32], _signature: [u8; 64], _message: [u8]) -> bool +``` + +example: + +```rust +fn main(hashed_message : [u8;32], pub_key_x : [u8;32], pub_key_y : [u8;32], signature : [u8;64]) { + let valid_signature = std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message); + assert(valid_signature); +} +``` + + diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx 
b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx new file mode 100644 index 00000000000..a9c10da6c06 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/eddsa.mdx @@ -0,0 +1,18 @@ +--- +title: EdDSA Verification +description: Learn about the cryptographic primitives regarding EdDSA +keywords: [cryptographic primitives, Noir project, eddsa, signatures] +sidebar_position: 5 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## eddsa::eddsa_poseidon_verify + +Verifier for EdDSA signatures + +```rust +fn eddsa_poseidon_verify(public_key_x : Field, public_key_y : Field, signature_s: Field, signature_r8_x: Field, signature_r8_y: Field, message: Field) -> bool +``` + + diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/hashes.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/hashes.mdx rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/hashes.mdx diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md new file mode 100644 index 00000000000..650f30165d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/index.md @@ -0,0 +1,14 @@ +--- +title: Cryptographic Primitives +description: + Learn about the cryptographic primitives ready to use for any Noir project +keywords: + [ + cryptographic primitives, + Noir project, + ] +--- + +The Noir team is progressively adding new cryptographic primitives to the standard library. Reach out for news or if you would be interested in adding more of these calculations in Noir. 
+ +Some methods are available thanks to the Aztec backend, not being performed using Noir. When using other backends, these methods may or may not be supplied. diff --git a/docs/docs/explanations/standard_library/cryptographic_primitives/scalar.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx similarity index 100% rename from docs/docs/explanations/standard_library/cryptographic_primitives/scalar.mdx rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/scalar.mdx diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx new file mode 100644 index 00000000000..7a2c9c20226 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/cryptographic_primitives/schnorr.mdx @@ -0,0 +1,38 @@ +--- +title: Schnorr Signatures +description: Learn how you can verify Schnorr signatures using Noir +keywords: [cryptographic primitives, Noir project, schnorr, signatures] +sidebar_position: 2 +--- + +import BlackBoxInfo from '@site/src/components/Notes/_blackbox.mdx'; + +## schnorr::verify_signature + +Verifier for Schnorr signatures over the embedded curve (for BN254 it is Grumpkin). + +```rust +fn verify_signature(_public_key_x: Field, _public_key_y: Field, _signature: [u8; 64], _message: [u8]) -> bool +``` + +where `_signature` can be generated like so using the npm package +[@noir-lang/barretenberg](https://www.npmjs.com/package/@noir-lang/barretenberg) + +```js +const { BarretenbergWasm } = require('@noir-lang/barretenberg/dest/wasm'); +const { Schnorr } = require('@noir-lang/barretenberg/dest/crypto/schnorr'); + +... + +const barretenberg = await BarretenbergWasm.new(); +const schnorr = new Schnorr(barretenberg); +const pubKey = schnorr.computePublicKey(privateKey); +const message = ... 
+const signature = Array.from( + schnorr.constructSignature(hash, privateKey).toBuffer() +); + +... +``` + + diff --git a/docs/docs/explanations/standard_library/logging.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md similarity index 87% rename from docs/docs/explanations/standard_library/logging.md rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md index 16daf922e15..2e163b52ab3 100644 --- a/docs/docs/explanations/standard_library/logging.md +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/logging.md @@ -22,7 +22,7 @@ The standard library provides two familiar statements you can use: `println` and You can print the output of both statements in your Noir code by using the `nargo execute` command or the `--show-output` flag when using `nargo test` (provided there are print statements in your tests). -It is recommended to use `nargo execute` if you want to debug failing constrains with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. +It is recommended to use `nargo execute` if you want to debug failing constraints with `println` or `print` statements. This is due to every input in a test being a constant rather than a witness, so we issue an error during compilation while we only print during execution (which comes after compilation). Neither `println`, nor `print` are callable for failed constraints caught at compile time. Both `print` and `println` are generic functions which can work on integers, fields, strings, and even structs or expressions. Note however, that slices are currently unsupported. 
For example: diff --git a/docs/docs/explanations/standard_library/merkle_trees.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md similarity index 93% rename from docs/docs/explanations/standard_library/merkle_trees.md rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md index 07fa2ccda79..5b45617812a 100644 --- a/docs/docs/explanations/standard_library/merkle_trees.md +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/merkle_trees.md @@ -17,7 +17,7 @@ keywords: ## compute_merkle_root -Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](@site/docs/explanations/standard_library/cryptographic_primitives/hashes.mdx#pedersen_hash). +Returns the root of the tree from the provided leaf and its hash path, using a [Pedersen hash](./cryptographic_primitives/hashes.mdx#pedersen_hash). ```rust fn compute_merkle_root(leaf : Field, index : Field, hash_path: [Field]) -> Field diff --git a/docs/docs/explanations/standard_library/options.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md similarity index 100% rename from docs/docs/explanations/standard_library/options.md rename to docs/versioned_docs/version-v0.22.0/noir/standard_library/options.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md new file mode 100644 index 00000000000..67962082a8f --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/recursion.md @@ -0,0 +1,90 @@ +--- +title: Recursive Proofs +description: Learn about how to write recursive proofs in Noir. +keywords: [recursion, recursive proofs, verification_key, aggregation object, verify_proof] +--- + +Noir supports recursively verifying proofs, meaning you verify the proof of a Noir program in another Noir program. 
This enables creating proofs of arbitrary size by doing step-wise verification of smaller components of a large proof. + +The `verify_proof` function takes a verification key, proof and public inputs for a zk program, as well as a key hash and an input aggregation object. The key hash is used to check the validity of the verification key and the input aggregation object is required by some proving systems. The `verify_proof` function returns an output aggregation object that can then be fed into future iterations of the proof verification if required. + +```rust +#[foreign(verify_proof)] +fn verify_proof(_verification_key : [Field], _proof : [Field], _public_input : Field, _key_hash : Field, _input_aggregation_object : [Field]) -> [Field] {} +``` + +:::info + +This is a black box function. Read [this section](./black_box_fns) to learn more about black box functions in Noir. + +::: + +## Example usage + +```rust +use dep::std; + +fn main( + verification_key : [Field; 114], + proof : [Field; 94], + public_inputs : [Field; 1], + key_hash : Field, + input_aggregation_object : [Field; 16], + proof_b : [Field; 94], +) -> pub [Field; 16] { + let output_aggregation_object_a = std::verify_proof( + verification_key.as_slice(), + proof.as_slice(), + public_inputs.as_slice(), + key_hash, + input_aggregation_object + ); + + let output_aggregation_object = std::verify_proof( + verification_key.as_slice(), + proof_b.as_slice(), + public_inputs.as_slice(), + key_hash, + output_aggregation_object_a + ); + + let mut output = [0; 16]; + for i in 0..16 { + output[i] = output_aggregation_object[i]; + } + output +} +``` + +## Parameters + +### `verification_key` + +The verification key for the zk program that is being verified. + +### `proof` + +The proof for the zk program that is being verified. + +### `public_inputs` + +These represent the public inputs of the proof we are verifying. They should be checked against in the circuit after construction of a new aggregation state. 
+ +### `key_hash` + +A key hash is used to check the validity of the verification key. The circuit implementing this opcode can use this hash to ensure that the key provided to the circuit matches the key produced by the circuit creator. + +### `input_aggregation_object` + +An aggregation object is a blob of data that the top-level verifier must run some proof system specific algorithm on to complete verification. The size is proof system specific and will be set by the backend integrating this opcode. The input aggregation object is only not `None` when we are verifying a previous recursive aggregation in the current circuit. If this is the first recursive aggregation there is no input aggregation object. It is left to the backend to determine how to handle when there is no input aggregation object. + +## Return value + +### `output_aggregation_object` + +This is the result of a recursive aggregation and is what will be fed into the next verifier. +The next verifier can either perform a final verification (returning true or false) or perform another recursive aggregation where this output aggregation object will be the input aggregation object of the next recursive aggregation. + +## Example + +You can see an example of how to do recursive proofs in [this example recursion demo repo](https://github.com/noir-lang/noir-examples/tree/master/recursion). diff --git a/docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md b/docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md new file mode 100644 index 00000000000..97dab02dac2 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/standard_library/zeroed.md @@ -0,0 +1,25 @@ +--- +title: Zeroed Function +description: + The zeroed function returns a zeroed value of any type. +keywords: + [ + zeroed + ] +--- + +Implements `fn zeroed() -> T` to return a zeroed value of any type. This function is generally unsafe to use as the zeroed bit pattern is not guaranteed to be valid for all types. 
It can however, be useful in cases when the value is guaranteed not to be used such as in a BoundedVec library implementing a growable vector, up to a certain length, backed by an array. The array can be initialized with zeroed values which are guaranteed to be inaccessible until the vector is pushed to. Similarly, enumerations in noir can be implemented using this method by providing zeroed values for the unused variants. + +You can access the function at `std::unsafe::zeroed`. + +This function currently supports the following types: + +- Field +- Bool +- Uint +- Array +- String +- Tuple +- Function + +Using it on other types could result in unexpected behavior. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json new file mode 100644 index 00000000000..666b691ae91 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/_category_.json @@ -0,0 +1,6 @@ +{ + "label": "Syntax", + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md new file mode 100644 index 00000000000..c5f9aff139c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/assert.md @@ -0,0 +1,27 @@ +--- +title: Assert Function +description: + Learn about the assert function in Noir, which can be used to explicitly constrain the predicate or + comparison expression that follows to be true, and what happens if the expression is false at + runtime. +keywords: [Noir programming language, assert statement, predicate expression, comparison expression] +sidebar_position: 4 +--- + +Noir includes a special `assert` function which will explicitly constrain the predicate/comparison +expression that follows to be true. If this expression is false at runtime, the program will fail to +be proven. 
Example: + +```rust +fn main(x : Field, y : Field) { + assert(x == y); +} +``` + +You can optionally provide a message to be logged when the assertion fails: + +```rust +assert(x == y, "x and y are not equal"); +``` + +> Assertions only work for predicate operations, such as `==`. If there's any ambiguity on the operation, the program will fail to compile. For example, it is unclear if `assert(x + y)` would check for `x + y == 0` or simply would return `true`. diff --git a/docs/docs/explanations/noir/comments.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md similarity index 100% rename from docs/docs/explanations/noir/comments.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/comments.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md new file mode 100644 index 00000000000..4ce65236db3 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/control_flow.md @@ -0,0 +1,45 @@ +--- +title: Control Flow +description: + Learn how to use loops and if expressions in the Noir programming language. Discover the syntax + and examples for for loops and if-else statements. +keywords: [Noir programming language, loops, for loop, if-else statements, Rust syntax] +sidebar_position: 2 +--- + +## Loops + +Noir has one kind of loop: the `for` loop. `for` loops allow you to repeat a block of code multiple +times. + +The following block of code between the braces is run 10 times. + +```rust +for i in 0..10 { + // do something +}; +``` + +The index for loops is of type `u64`. + +## If Expressions + +Noir supports `if-else` statements. The syntax is most similar to Rust's where it is not required +for the statement's conditional to be surrounded by parentheses. 
+ +```rust +let a = 0; +let mut x: u32 = 0; + +if a == 0 { + if a != 0 { + x = 6; + } else { + x = 2; + } +} else { + x = 5; + assert(x == 5); +} +assert(x == 2); +``` diff --git a/docs/docs/explanations/noir/data_bus.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md similarity index 100% rename from docs/docs/explanations/noir/data_bus.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/data_bus.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json new file mode 100644 index 00000000000..5d694210bbf --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 0, + "collapsible": true, + "collapsed": true +} diff --git a/docs/docs/explanations/noir/data_types/arrays.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md similarity index 100% rename from docs/docs/explanations/noir/data_types/arrays.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/arrays.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md new file mode 100644 index 00000000000..69826fcd724 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/booleans.md @@ -0,0 +1,31 @@ +--- +title: Booleans +description: + Delve into the Boolean data type in Noir. Understand its methods, practical examples, and best practices for using Booleans in your Noir programs. 
+keywords: + [ + noir, + boolean type, + methods, + examples, + logical operations, + ] +sidebar_position: 2 +--- + + +The `bool` type in Noir has two possible values: `true` and `false`: + +```rust +fn main() { + let t = true; + let f: bool = false; +} +``` + +> **Note:** When returning a boolean value, it will show up as a value of 1 for `true` and 0 for +> `false` in _Verifier.toml_. + +The boolean type is most commonly used in conditionals like `if` expressions and `assert` +statements. More about conditionals is covered in the [Control Flow](../control_flow) and +[Assert Function](../assert) sections. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md new file mode 100644 index 00000000000..a1c67945d66 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/fields.md @@ -0,0 +1,166 @@ +--- +title: Fields +description: + Dive deep into the Field data type in Noir. Understand its methods, practical examples, and best practices to effectively use Fields in your Noir programs. +keywords: + [ + noir, + field type, + methods, + examples, + best practices, + ] +sidebar_position: 0 +--- + +The field type corresponds to the native field type of the proving backend. + +The size of a Noir field depends on the elliptic curve's finite field for the proving backend +adopted. For example, a field would be a 254-bit integer when paired with the default backend that +spans the Grumpkin curve. + +Fields support integer arithmetic and are often used as the default numeric type in Noir: + +```rust +fn main(x : Field, y : Field) { + let z = x + y; +} +``` + +`x`, `y` and `z` are all private fields in this example. Using the `let` keyword we defined a new +private value `z` constrained to be equal to `x + y`. + +If proving efficiency is of priority, fields should be used as a default for solving problems. +Smaller integer types (e.g. 
`u64`) incur extra range constraints. + +## Methods + +After declaring a Field, you can use these common methods on it: + +### to_le_bits + +Transforms the field into an array of bits, Little Endian. + +```rust +fn to_le_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_le_bits(32); +} +``` + +### to_be_bits + +Transforms the field into an array of bits, Big Endian. + +```rust +fn to_be_bits(_x : Field, _bit_size: u32) -> [u1; N] +``` + +example: + +```rust +fn main() { + let field = 2; + let bits = field.to_be_bits(32); +} +``` + +### to_le_bytes + +Transforms into an array of bytes, Little Endian + +```rust +fn to_le_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_le_bytes(4); +} +``` + +### to_be_bytes + +Transforms into an array of bytes, Big Endian + +```rust +fn to_be_bytes(_x : Field, byte_size: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let bytes = field.to_be_bytes(4); +} +``` + +### to_le_radix + +Decomposes into a vector over the specified base, Little Endian + +```rust +fn to_le_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_le_radix(256, 4); +} +``` + +### to_be_radix + +Decomposes into a vector over the specified base, Big Endian + +```rust +fn to_be_radix(_x : Field, _radix: u32, _result_len: u32) -> [u8] +``` + +example: + +```rust +fn main() { + let field = 2; + let radix = field.to_be_radix(256, 4); +} +``` + +### pow_32 + +Returns the value to the power of the specified exponent + +```rust +fn pow_32(self, exponent: Field) -> Field +``` + +example: + +```rust +fn main() { + let field = 2 + let pow = field.pow_32(4); + assert(pow == 16); +} +``` + +### sgn0 + +Parity of (prime) Field element, i.e. sgn0(x mod p) = 0 if x ∈ \{0, ..., p-1\} is even, otherwise sgn0(x mod p) = 1. 
+ +```rust +fn sgn0(self) -> u1 +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md new file mode 100644 index 00000000000..f6121af17e2 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/function_types.md @@ -0,0 +1,26 @@ +--- +title: Function types +sidebar_position: 10 +--- + +Noir supports higher-order functions. The syntax for a function type is as follows: + +```rust +fn(arg1_type, arg2_type, ...) -> return_type +``` + +Example: + +```rust +fn assert_returns_100(f: fn() -> Field) { // f takes no args and returns a Field + assert(f() == 100); +} + +fn main() { + assert_returns_100(|| 100); // ok + assert_returns_100(|| 150); // fails +} +``` + +A function type also has an optional capture environment - this is necessary to support closures. +See [Lambdas](../lambdas.md) for more details. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md new file mode 100644 index 00000000000..f09bca0ee04 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/index.md @@ -0,0 +1,96 @@ +--- +title: Data Types +description: + Get a clear understanding of the two categories of Noir data types - primitive types and compound + types. Learn about their characteristics, differences, and how to use them in your Noir + programming. +keywords: + [ + noir, + data types, + primitive types, + compound types, + private types, + public types, + ] +--- + +Every value in Noir has a type, which determines which operations are valid for it. + +All values in Noir are fundamentally composed of `Field` elements. For a more approachable +developing experience, abstractions are added on top to introduce different data types in Noir. + +Noir has two category of data types: primitive types (e.g. 
`Field`, integers, `bool`) and compound +types that group primitive types (e.g. arrays, tuples, structs). Each value can either be private or +public. + +## Private & Public Types + +A **private value** is known only to the Prover, while a **public value** is known by both the +Prover and Verifier. Mark values as `private` when the value should only be known to the prover. All +primitive types (including individual fields of compound types) in Noir are private by default, and +can be marked public when certain values are intended to be revealed to the Verifier. + +> **Note:** For public values defined in Noir programs paired with smart contract verifiers, once +> the proofs are verified on-chain the values can be considered known to everyone that has access to +> that blockchain. + +Public data types are treated no differently to private types apart from the fact that their values +will be revealed in proofs generated. Simply changing the value of a public type will not change the +circuit (where the same goes for changing values of private types as well). + +_Private values_ are also referred to as _witnesses_ sometimes. + +> **Note:** The terms private and public when applied to a type (e.g. `pub Field`) have a different +> meaning than when applied to a function (e.g. `pub fn foo() {}`). +> +> The former is a visibility modifier for the Prover to interpret if a value should be made known to +> the Verifier, while the latter is a visibility modifier for the compiler to interpret if a +> function should be made accessible to external Noir programs like in other languages. + +### pub Modifier + +All data types in Noir are private by default. Types are explicitly declared as public using the +`pub` modifier: + +```rust +fn main(x : Field, y : pub Field) -> pub Field { + x + y +} +``` + +In this example, `x` is **private** while `y` and `x + y` (the return value) are **public**. 
Note +that visibility is handled **per variable**, so it is perfectly valid to have one input that is +private and another that is public. + +> **Note:** Public types can only be declared through parameters on `main`. + +## Type Aliases + +A type alias is a new name for an existing type. Type aliases are declared with the keyword `type`: + +```rust +type Id = u8; + +fn main() { + let id: Id = 1; + let zero: u8 = 0; + assert(zero + 1 == id); +} +``` + +Type aliases can also be used with [generics](../generics.md): + +```rust +type Id<Size> = Size; + +fn main() { + let id: Id<u32> = 1; + let zero: u32 = 0; + assert(zero + 1 == id); +} +``` + +### BigInt + +You can achieve BigInt functionality using the [Noir BigInt](https://github.com/shuklaayush/noir-bigint) library. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md new file mode 100644 index 00000000000..7d1e83cf4e9 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/integers.md @@ -0,0 +1,113 @@ +--- +title: Integers +description: Explore the Integer data type in Noir. Learn about its methods, see real-world examples, and grasp how to efficiently use Integers in your Noir code. +keywords: [noir, integer types, methods, examples, arithmetic] +sidebar_position: 1 +--- + +An integer type is a range constrained field type. The Noir frontend supports arbitrarily-sized, both unsigned and signed integer types. + +:::info + +When an integer is defined in Noir without a specific type, it will default to `Field`. + +The one exception is for loop indices which default to `u64` since comparisons on `Field`s are not possible. + +::: + +## Unsigned Integers + +An unsigned integer type is specified first with the letter `u` (indicating its unsigned nature) followed by its bit size (e.g.
`8`): + +```rust +fn main() { + let x: u8 = 1; + let y: u8 = 1; + let z = x + y; + assert (z == 2); +} +``` + +The bit size determines the maximum value the integer type can store. For example, a `u8` variable can store a value in the range of 0 to 255 (i.e. $2^{8}-1$). + +## Signed Integers + +A signed integer type is specified first with the letter `i` (which stands for integer) followed by its bit size (e.g. `8`): + +```rust +fn main() { + let x: i8 = -1; + let y: i8 = -1; + let z = x + y; + assert (z == -2); +} +``` + +The bit size determines the maximum and minimum range of values the integer type can store. For example, an `i8` variable can store a value in the range of -128 to 127 (i.e. $-2^{7}$ to $2^{7}-1$). + +:::tip + +If you are using the default proving backend with Noir, both even (e.g. _u2_, _i2_) and odd (e.g. _u3_, _i3_) arbitrarily-sized integer types up to 127 bits (i.e. _u127_ and _i127_) are supported. + +::: + +## Overflows + +Computations that exceed the type boundaries will result in overflow errors. This happens with both signed and unsigned integers. For example, attempting to prove: + +```rust +fn main(x: u8, y: u8) { + let z = x + y; +} +``` + +With: + +```toml +x = "255" +y = "1" +``` + +Would result in: + +``` +$ nargo prove +error: Assertion failed: 'attempt to add with overflow' +┌─ ~/src/main.nr:9:13 +│ +│ let z = x + y; +│ ----- +│ += Call stack: + ... +``` + +A similar error would happen with signed integers: + +```rust +fn main() { + let x: i8 = -118; + let y: i8 = -11; + let z = x + y; +} +``` + +### Wrapping methods + +Although integer overflow is expected to error, some use-cases rely on wrapping.
For these use-cases, the standard library provides `wrapping` variants of certain common operations: + +```rust +fn wrapping_add(x: T, y: T) -> T; +fn wrapping_sub(x: T, y: T) -> T; +fn wrapping_mul(x: T, y: T) -> T; +``` + +Example of how it is used: + +```rust +use dep::std; + +fn main(x: u8, y: u8) -> pub u8 { + std::wrapping_add(x, y) +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md new file mode 100644 index 00000000000..a5293d11cfb --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/references.md @@ -0,0 +1,23 @@ +--- +title: References +sidebar_position: 9 +--- + +Noir supports first-class references. References are a bit like pointers: they point to a specific address that can be followed to access the data stored at that address. You can use Rust-like syntax to use pointers in Noir: the `&` operator references the variable, the `*` operator dereferences it. + +Example: + +```rust +fn main() { + let mut x = 2; + + // you can reference x as &mut and pass it to multiplyBy2 + multiplyBy2(&mut x); +} + +// you can access &mut here +fn multiplyBy2(x: &mut Field) { + // and dereference it with * + *x = *x * 2; +} +``` diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx new file mode 100644 index 00000000000..4a6ee816aa2 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/slices.mdx @@ -0,0 +1,147 @@ +--- +title: Slices +description: Explore the Slice data type in Noir. Understand its methods, see real-world examples, and learn how to effectively use Slices in your Noir programs.
+keywords: [noir, slice type, methods, examples, subarrays] +sidebar_position: 5 +--- + +import Experimental from '@site/src/components/Notes/_experimental.mdx'; + + + +A slice is a dynamically-sized view into a sequence of elements. They can be resized at runtime, but because they don't own the data, they cannot be returned from a circuit. You can treat slices as arrays without a constrained size. + +```rust +use dep::std::slice; + +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +[test-file]: https://github.com/noir-lang/noir/blob/f387ec1475129732f72ba294877efdf6857135ac/crates/nargo_cli/tests/test_data_ssa_refactor/slices/src/main.nr + +## Methods + +For convenience, the STD provides some ready-to-use, common methods for slices: + +### push_back + +Pushes a new element to the end of the slice, returning a new slice with a length one greater than the original unmodified slice. + +```rust +fn push_back(_self: [T], _elem: T) -> [T] +``` + +example: + +```rust +fn main() -> pub Field { + let mut slice: [Field] = [0; 2]; + + let mut new_slice = slice.push_back(6); + new_slice.len() +} +``` + +View the corresponding test file [here][test-file]. + +### push_front + +Returns a new array with the specified element inserted at index 0. The existing elements indexes are incremented by 1. + +```rust +fn push_front(_self: Self, _elem: T) -> Self +``` + +Example: + +```rust +let mut new_slice: [Field] = []; +new_slice = new_slice.push_front(20); +assert(new_slice[0] == 20); // returns true +``` + +View the corresponding test file [here][test-file]. + +### pop_front + +Returns a tuple of two items, the first element of the array and the rest of the array. 
+ +```rust +fn pop_front(_self: Self) -> (T, Self) +``` + +Example: + +```rust +let (first_elem, rest_of_slice) = slice.pop_front(); +``` + +View the corresponding test file [here][test-file]. + +### pop_back + +Returns a tuple of two items, the beginning of the array with the last element omitted and the last element. + +```rust +fn pop_back(_self: Self) -> (Self, T) +``` + +Example: + +```rust +let (popped_slice, last_elem) = slice.pop_back(); +``` + +View the corresponding test file [here][test-file]. + +### append + +Loops over a slice and adds it to the end of another. + +```rust +fn append(mut self, other: Self) -> Self +``` + +Example: + +```rust +let append = [1, 2].append([3, 4, 5]); +``` + +### insert + +Inserts an element at a specified index and shifts all following elements by 1. + +```rust +fn insert(_self: Self, _index: Field, _elem: T) -> Self +``` + +Example: + +```rust +new_slice = rest_of_slice.insert(2, 100); +assert(new_slice[2] == 100); +``` + +View the corresponding test file [here][test-file]. + +### remove + +Remove an element at a specified index, shifting all elements after it to the left, returning the altered slice and the removed element. 
+ +```rust +fn remove(_self: Self, _index: Field) -> (Self, T) +``` + +Example: + +```rust +let (remove_slice, removed_elem) = slice.remove(3); +``` diff --git a/docs/docs/explanations/noir/data_types/strings.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md similarity index 100% rename from docs/docs/explanations/noir/data_types/strings.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/strings.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md new file mode 100644 index 00000000000..dbf68c99813 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/structs.md @@ -0,0 +1,70 @@ +--- +title: Structs +description: + Explore the Struct data type in Noir. Learn about its methods, see real-world examples, and grasp how to effectively define and use Structs in your Noir programs. +keywords: + [ + noir, + struct type, + methods, + examples, + data structures, + ] +sidebar_position: 8 +--- + +A struct also allows for grouping multiple values of different types. Unlike tuples, we can also +name each field. + +> **Note:** The usage of _field_ here refers to each element of the struct and is unrelated to the +> field type of Noir. + +Defining a struct requires giving it a name and listing each field within as `: ` pairs: + +```rust +struct Animal { + hands: Field, + legs: Field, + eyes: u8, +} +``` + +An instance of a struct can then be created with actual values in `: ` pairs in any +order. 
Struct fields are accessible using their given names: + +```rust +fn main() { + let legs = 4; + + let dog = Animal { + eyes: 2, + hands: 0, + legs, + }; + + let zero = dog.hands; +} +``` + +Structs can also be destructured in a pattern, binding each field to a new variable: + +```rust +fn main() { + let Animal { hands, legs: feet, eyes } = get_octopus(); + + let ten = hands + feet + eyes as u8; +} + +fn get_octopus() -> Animal { + let octopus = Animal { + hands: 0, + legs: 8, + eyes: 2, + }; + + octopus +} +``` + +The new variables can be bound with names different from the original struct field names, as +showcased in the `legs --> feet` binding in the example above. diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md new file mode 100644 index 00000000000..2ec5c9c4113 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/tuples.md @@ -0,0 +1,48 @@ +--- +title: Tuples +description: + Dive into the Tuple data type in Noir. Understand its methods, practical examples, and best practices for efficiently using Tuples in your Noir code. +keywords: + [ + noir, + tuple type, + methods, + examples, + multi-value containers, + ] +sidebar_position: 7 +--- + +A tuple collects multiple values like an array, but with the added ability to collect values of +different types: + +```rust +fn main() { + let tup: (u8, u64, Field) = (255, 500, 1000); +} +``` + +One way to access tuple elements is via destructuring using pattern matching: + +```rust +fn main() { + let tup = (1, 2); + + let (one, two) = tup; + + let three = one + two; +} +``` + +Another way to access tuple elements is via direct member access, using a period (`.`) followed by +the index of the element we want to access. 
Index `0` corresponds to the first tuple element, `1` to +the second and so on: + +```rust +fn main() { + let tup = (5, 6, 7, 8); + + let five = tup.0; + let eight = tup.3; +} +``` diff --git a/docs/docs/explanations/noir/data_types/vectors.mdx b/docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx similarity index 100% rename from docs/docs/explanations/noir/data_types/vectors.mdx rename to docs/versioned_docs/version-v0.22.0/noir/syntax/data_types/vectors.mdx diff --git a/docs/docs/explanations/noir/distinct.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md similarity index 100% rename from docs/docs/explanations/noir/distinct.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/distinct.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md new file mode 100644 index 00000000000..48aba9cd058 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/functions.md @@ -0,0 +1,226 @@ +--- +title: Functions +description: + Learn how to declare functions and methods in Noir, a programming language with Rust semantics. + This guide covers parameter declaration, return types, call expressions, and more. +keywords: [Noir, Rust, functions, methods, parameter declaration, return types, call expressions] +sidebar_position: 1 +--- + +Functions in Noir follow the same semantics of Rust, though Noir does not support early returns. + +To declare a function the `fn` keyword is used. + +```rust +fn foo() {} +``` + +By default, functions are visible only within the package they are defined. 
To make them visible outside of that package (for example, as part of a [library](../modules_packages_crates/crates_and_packages.md#libraries)), you should mark them as `pub`: + +```rust +pub fn foo() {} +``` + +You can also restrict the visibility of the function to only the crate it was defined in, by specifying `pub(crate)`: + +```rust +pub(crate) fn foo() {} //foo can only be called within its crate +``` + +All parameters in a function must have a type and all types are known at compile time. The parameter +is pre-pended with a colon and the parameter type. Multiple parameters are separated using a comma. + +```rust +fn foo(x : Field, y : Field){} +``` + +The return type of a function can be stated by using the `->` arrow notation. The function below +states that the foo function must return a `Field`. If the function returns no value, then the arrow +is omitted. + +```rust +fn foo(x : Field, y : Field) -> Field { + x + y +} +``` + +Note that a `return` keyword is unneeded in this case - the last expression in a function's body is +returned. + +## Main function + +If you're writing a binary, the `main` function is the starting point of your program. You can pass all types of expressions to it, as long as they have a fixed size at compile time: + +```rust +fn main(x : Field) // this is fine: passing a Field +fn main(x : [Field; 2]) // this is also fine: passing a Field with known size at compile-time +fn main(x : (Field, bool)) // 👌: passing a (Field, bool) tuple means size 2 +fn main(x : str<5>) // this is fine, as long as you pass a string of size 5 + +fn main(x : Vec) // can't compile, has variable size +fn main(x : [Field]) // can't compile, has variable size +fn main(....// i think you got it by now +``` + +Keep in mind [tests](../../getting_started/tooling/testing.md) don't differentiate between `main` and any other function. 
The following snippet passes tests, but won't compile or prove: + +```rust +fn main(x : [Field]) { + assert(x[0] == 1); +} + +#[test] +fn test_one() { + main([1, 2]); +} +``` + +```bash +$ nargo test +[testing] Running 1 test functions +[testing] Testing test_one... ok +[testing] All tests passed + +$ nargo check +The application panicked (crashed). +Message: Cannot have variable sized arrays as a parameter to main +``` + +## Call Expressions + +Calling a function in Noir is executed by using the function name and passing in the necessary +arguments. + +Below we show how to call the `foo` function from the `main` function using a call expression: + +```rust +fn main(x : Field, y : Field) { + let z = foo(x); +} + +fn foo(x : Field) -> Field { + x + x +} +``` + +## Methods + +You can define methods in Noir on any struct type in scope. + +```rust +struct MyStruct { + foo: Field, + bar: Field, +} + +impl MyStruct { + fn new(foo: Field) -> MyStruct { + MyStruct { + foo, + bar: 2, + } + } + + fn sum(self) -> Field { + self.foo + self.bar + } +} + +fn main() { + let s = MyStruct::new(40); + assert(s.sum() == 42); +} +``` + +Methods are just syntactic sugar for functions, so if we wanted to we could also call `sum` as +follows: + +```rust +assert(MyStruct::sum(s) == 42); +``` + +It is also possible to specialize which method is chosen depending on the [generic](./generics.md) type that is used. In this example, the `foo` function returns different values depending on its type: + +```rust +struct Foo<T> {} + +impl Foo<Field> { + fn foo(self) -> Field { 1 } +} + +impl Foo<u32> { + fn foo(self) -> Field { 2 } +} + +fn main() { + let f1: Foo<Field> = Foo{}; + let f2: Foo<u32> = Foo{}; + assert(f1.foo() + f2.foo() == 3); +} +``` + +Also note that impls with the same method name defined in them cannot overlap. For example, if we already have `foo` defined for `Foo<Field>` and `Foo<u32>` like we do above, we cannot also define `foo` in an `impl<T> Foo<T>` since it would be ambiguous which version of `foo` to choose.
+ +```rust +// Including this impl in the same project as the above snippet would +// cause an overlapping impls error +impl<T> Foo<T> { + fn foo(self) -> Field { 3 } +} +``` + +## Lambdas + +Lambdas are anonymous functions. They follow the syntax of Rust - `|arg1, arg2, ..., argN| return_expression`. + +```rust +let add_50 = |val| val + 50; +assert(add_50(100) == 150); +``` + +See [Lambdas](./lambdas.md) for more details. + +## Attributes + +Attributes are metadata that can be applied to a function, using the following syntax: `#[attribute(value)]`. + +Supported attributes include: + +- **builtin**: the function is implemented by the compiler, for efficiency purposes. +- **deprecated**: mark the function as _deprecated_. Calling the function will generate a warning: `warning: use of deprecated function` +- **field**: Used to enable conditional compilation of code depending on the field size. See below for more details +- **oracle**: mark the function as _oracle_; meaning it is an external unconstrained function, implemented in noir_js. See [Unconstrained](./unconstrained.md) and [NoirJS](../../reference/NoirJS/noir_js/index.md) for more details. +- **test**: mark the function as unit tests. See [Tests](../../getting_started/tooling/testing.md) for more details + +### Field Attribute + +The field attribute defines which field the function is compatible for. The function is conditionally compiled, under the condition that the field attribute matches the Noir native field. +The field can be defined implicitly, by using the name of the elliptic curve usually associated to it - for instance bn254, bls12_381 - or explicitly by using the field (prime) order, in decimal or hexadecimal form. +As a result, it is possible to define multiple versions of a function with each version specialized for a different field attribute. This can be useful when a function requires different parameters depending on the underlying elliptic curve.
+ +Example: we define the function `foo()` three times below. Once for the default Noir bn254 curve, once for the field $\mathbb F_{23}$, which will normally never be used by Noir, and once again for the bls12_381 curve. + +```rust +#[field(bn254)] +fn foo() -> u32 { + 1 +} + +#[field(23)] +fn foo() -> u32 { + 2 +} + +// This commented code would not compile as foo would be defined twice because it is the same field as bn254 +// #[field(21888242871839275222246405745257275088548364400416034343698204186575808495617)] +// fn foo() -> u32 { +// 2 +// } + +#[field(bls12_381)] +fn foo() -> u32 { + 3 +} +``` + +If the field name is not known to Noir, it will discard the function. Field names are case insensitive. diff --git a/docs/docs/explanations/noir/generics.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md similarity index 96% rename from docs/docs/explanations/noir/generics.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md index 443ca2b45a5..d59e4c5d7c6 100644 --- a/docs/docs/explanations/noir/generics.md +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/generics.md @@ -111,4 +111,4 @@ fn main() { ``` You can see an example of generics in the tests -[here](https://github.com/noir-lang/noir/blob/master/tooling/nargo_cli/tests/execution_success/generics/src/main.nr). +[here](https://github.com/noir-lang/noir/blob/master/test_programs/execution_success/generics/src/main.nr). 
diff --git a/docs/docs/explanations/noir/lambdas.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md similarity index 100% rename from docs/docs/explanations/noir/lambdas.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/lambdas.md diff --git a/docs/docs/explanations/noir/mutability.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md similarity index 97% rename from docs/docs/explanations/noir/mutability.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md index 58e9c1cecfb..6abfae3cfa7 100644 --- a/docs/docs/explanations/noir/mutability.md +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/mutability.md @@ -70,11 +70,11 @@ fn main(x : Field, y : [Field; N]) { assert(res == y[0]); - let res2 = x * mysubmodule::N; + let res2 = x * my_submodule::N; assert(res != res2); } -mod mysubmodule { +mod my_submodule { use dep::std; global N: Field = 10; diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md new file mode 100644 index 00000000000..60425cb8994 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/ops.md @@ -0,0 +1,98 @@ +--- +title: Logical Operations +description: + Learn about the supported arithmetic and logical operations in the Noir programming language. + Discover how to perform operations on private input types, integers, and booleans. 
+keywords: + [ + Noir programming language, + supported operations, + arithmetic operations, + logical operations, + predicate operators, + bitwise operations, + short-circuiting, + backend, + ] +sidebar_position: 3 +--- + +# Operations + +## Table of Supported Operations + +| Operation | Description | Requirements | +| :-------- | :------------------------------------------------------------: | -------------------------------------: | +| + | Adds two private input types together | Types must be private input | +| - | Subtracts two private input types together | Types must be private input | +| \* | Multiplies two private input types together | Types must be private input | +| / | Divides two private input types together | Types must be private input | +| ^ | XOR two private input types together | Types must be integer | +| & | AND two private input types together | Types must be integer | +| \| | OR two private input types together | Types must be integer | +| \<\< | Left shift an integer by another integer amount | Types must be integer | +| >> | Right shift an integer by another integer amount | Types must be integer | +| ! | Bitwise not of a value | Type must be integer or boolean | +| \< | returns a bool if one value is less than the other | Upper bound must have a known bit size | +| \<= | returns a bool if one value is less than or equal to the other | Upper bound must have a known bit size | +| > | returns a bool if one value is more than the other | Upper bound must have a known bit size | +| >= | returns a bool if one value is more than or equal to the other | Upper bound must have a known bit size | +| == | returns a bool if one value is equal to the other | Both types must not be constants | +| != | returns a bool if one value is not equal to the other | Both types must not be constants | + +### Predicate Operators + +`<,<=, !=, == , >, >=` are known as predicate/comparison operations because they compare two values. 
+This differs from the operations such as `+` where the operands are used in _computation_. + +### Bitwise Operations Example + +```rust +fn main(x : Field) { + let y = x as u32; + let z = y & y; +} +``` + +`z` is implicitly constrained to be the result of `y & y`. The `&` operand is used to denote bitwise +`&`. + +> `x & x` would not compile as `x` is a `Field` and not an integer type. + +### Logical Operators + +Noir has no support for the logical operators `||` and `&&`. This is because encoding the +short-circuiting that these operators require can be inefficient for Noir's backend. Instead you can +use the bitwise operators `|` and `&` which operate identically for booleans, just without the +short-circuiting. + +```rust +let my_val = 5; + +let mut flag = 1; +if (my_val > 6) | (my_val == 0) { + flag = 0; +} +assert(flag == 1); + +if (my_val != 10) & (my_val < 50) { + flag = 0; +} +assert(flag == 0); +``` + +### Shorthand operators + +Noir has shorthand operators for most of the above operators, namely `+=, -=, *=, /=, %=, &=, |=, ^=, <<=`, and `>>=`. These allow for more concise syntax. For example: + +```rust +let mut i = 0; +i = i + 1; +``` + +could be written as: + +```rust +let mut i = 0; +i += 1; +``` diff --git a/docs/docs/explanations/noir/shadowing.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md similarity index 100% rename from docs/docs/explanations/noir/shadowing.md rename to docs/versioned_docs/version-v0.22.0/noir/syntax/shadowing.md diff --git a/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md b/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md new file mode 100644 index 00000000000..6b3424f7993 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/noir/syntax/unconstrained.md @@ -0,0 +1,95 @@ +--- +title: Unconstrained Functions +description: "Learn about what unconstrained functions in Noir are, how to use them and when you'd want to."
+ +keywords: [Noir programming language, unconstrained, open] +sidebar_position: 5 +--- + +Unconstrained functions are functions which do not constrain any of the included computation and allow for non-deterministic computation. + +## Why? + +Zero-knowledge (ZK) domain-specific languages (DSL) enable developers to generate ZK proofs from their programs by compiling code down to the constraints of an NP complete language (such as R1CS or PLONKish languages). However, the hard bounds of a constraint system can be very limiting to the functionality of a ZK DSL. + +Enabling a circuit language to perform unconstrained execution is a powerful tool. Said another way, unconstrained execution lets developers generate witnesses from code that does not generate any constraints. Being able to execute logic outside of a circuit is critical for both circuit performance and constructing proofs on information that is external to a circuit. + +Fetching information from somewhere external to a circuit can also be used to enable developers to improve circuit efficiency. + +A ZK DSL does not just prove computation, but proves that some computation was handled correctly. Thus, it is necessary that when we switch from performing some operation directly inside of a circuit to inside of an unconstrained environment that the appropriate constraints are still laid down elsewhere in the circuit. + +## Example + +An in depth example might help drive the point home. This example comes from the excellent [post](https://discord.com/channels/1113924620781883405/1124022445054111926/1128747641853972590) by Tom in the Noir Discord. + +Let's look at how we can optimize a function to turn a `u72` into an array of `u8`s. 
+ +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8)) as u72 & 0xff) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 91 +Backend circuit size: 3619 +``` + +A lot of the operations in this function are optimized away by the compiler (all the bit-shifts turn into divisions by constants). However we can save a bunch of gates by casting to u8 a bit earlier. This automatically truncates the bit-shifted value to fit in a u8 which allows us to remove the AND against 0xff. This saves us ~480 gates in total. + +```rust +fn main(num: u72) -> pub [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 75 +Backend circuit size: 3143 +``` + +Those are some nice savings already but we can do better. This code is all constrained so we're proving every step of calculating out using num, but we don't actually care about how we calculate this, just that it's correct. This is where brillig comes in. + +It turns out that truncating a u72 into a u8 is hard to do inside a snark, each time we do as u8 we lay down 4 ACIR opcodes which get converted into multiple gates. It's actually much easier to calculate num from out than the other way around. All we need to do is multiply each element of out by a constant and add them all together, both relatively easy operations inside a snark. + +We can then run u72_to_u8 as unconstrained brillig code in order to calculate out, then use that result in our constrained function and assert that if we were to do the reverse calculation we'd get back num.
This looks a little like the below: + +```rust +fn main(num: u72) -> pub [u8; 8] { + let out = u72_to_u8(num); + + let mut reconstructed_num: u72 = 0; + for i in 0..8 { + reconstructed_num += (out[i] as u72 << (56 - (8 * i))); + } + assert(num == reconstructed_num); + out +} + +unconstrained fn u72_to_u8(num: u72) -> [u8; 8] { + let mut out: [u8; 8] = [0; 8]; + for i in 0..8 { + out[i] = (num >> (56 - (i * 8))) as u8; + } + out +} +``` + +``` +Total ACIR opcodes generated for language PLONKCSat { width: 3 }: 78 +Backend circuit size: 2902 +``` + +This ends up taking off another ~250 gates from our circuit! We've ended up with more ACIR opcodes than before but they're easier for the backend to prove (resulting in fewer gates). + +Generally we want to use brillig whenever there's something that's easy to verify but hard to compute within the circuit. For example, if you wanted to calculate a square root of a number it'll be a much better idea to calculate this in brillig and then assert that if you square the result you get back your number. diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md new file mode 100644 index 00000000000..5cbe9421b92 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend.md @@ -0,0 +1,185 @@ +# BarretenbergBackend + +## Implements + +- [`Backend`](../interfaces/Backend.md) + +## Constructors + +### new BarretenbergBackend(acirCircuit, options) + +```ts +new BarretenbergBackend(acirCircuit, options): BarretenbergBackend +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `acirCircuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `options` | [`BackendOptions`](../type-aliases/BackendOptions.md) | + +#### Returns + +[`BarretenbergBackend`](BarretenbergBackend.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`destroy`](../interfaces/Backend.md#destroy) + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts +generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateFinalProof`](../interfaces/Backend.md#generatefinalproof) + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(witness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `witness` | `Uint8Array` | + +#### Returns + 
+`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProof`](../interfaces/Backend.md#generateintermediateproof) + +#### Example + +```typescript +const intermediateProof = await backend.generateIntermediateProof(witness); +``` + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | Default value | +| :------ | :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | `undefined` | +| `numOfPublicInputs` | `number` | `0` | + +#### Returns + +`Promise`\<`object`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`generateIntermediateProofArtifacts`](../interfaces/Backend.md#generateintermediateproofartifacts) + +#### Example + +```typescript +const artifacts = await backend.generateIntermediateProofArtifacts(proof, numOfPublicInputs); +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyFinalProof`](../interfaces/Backend.md#verifyfinalproof) + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Implementation of + +[`Backend`](../interfaces/Backend.md).[`verifyIntermediateProof`](../interfaces/Backend.md#verifyintermediateproof) + +#### Example + +```typescript +const isValidIntermediate = await backend.verifyIntermediateProof(proof); +``` + +*** + +Generated 
using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md new file mode 100644 index 00000000000..bfbecb52864 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/index.md @@ -0,0 +1,45 @@ +# backend_barretenberg + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [BarretenbergBackend](classes/BarretenbergBackend.md) | - | + +### Interfaces + +| Interface | Description | +| :------ | :------ | +| [Backend](interfaces/Backend.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [BackendOptions](type-aliases/BackendOptions.md) | - | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | + +## Functions + +### flattenPublicInputs() + +```ts +flattenPublicInputs(publicInputs): string[] +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `publicInputs` | `WitnessMap` | + +#### Returns + +`string`[] + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md new file mode 100644 index 00000000000..3eb9645c8d2 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/interfaces/Backend.md @@ -0,0 +1,132 @@ +# Backend + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the backend + +*** + +### generateFinalProof() + +```ts 
+generateFinalProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a final proof (not meant to be verified in another circuit) + +*** + +### generateIntermediateProof() + +```ts +generateIntermediateProof(decompressedWitness): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `decompressedWitness` | `Uint8Array` | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates an intermediate proof (meant to be verified in another circuit) + +*** + +### generateIntermediateProofArtifacts() + +```ts +generateIntermediateProofArtifacts(proofData, numOfPublicInputs): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | +| `numOfPublicInputs` | `number` | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Retrieves the artifacts from a proof in the Field format + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies a final proof + +*** + +### verifyIntermediateProof() + +```ts +verifyIntermediateProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Verifies an intermediate proof + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git 
a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md new file mode 100644 index 00000000000..266ade75d17 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions.md @@ -0,0 +1,19 @@ +# BackendOptions + +```ts +type BackendOptions: object; +``` + +## Description + +An options object, currently only used to specify the number of threads to use. + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `threads` | `number` | **Description**

Number of threads | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof |
+| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs new file mode 100644 index 00000000000..2aaa55bccf6 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/backend_barretenberg/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/classes/BarretenbergBackend","label":"BarretenbergBackend"}]},{"type":"category","label":"Interfaces","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/interfaces/Backend","label":"Backend"}]},{"type":"category","label":"Type Aliases","items":[{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/BackendOptions","label":"BackendOptions"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"reference/NoirJS/backend_barretenberg/type-aliases/ProofData","label":"ProofData"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll new file mode 100644 index 00000000000..e2ac6616add --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/.nojekyll @@ -0,0 +1 @@ +TypeDoc added this file to prevent GitHub Pages from using Jekyll. You can turn off this behavior by setting the `githubPages` option to false. 
\ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md new file mode 100644 index 00000000000..34e20d99684 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/classes/Noir.md @@ -0,0 +1,132 @@ +# Noir + +## Constructors + +### new Noir(circuit, backend) + +```ts +new Noir(circuit, backend?): Noir +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `circuit` | [`CompiledCircuit`](../type-aliases/CompiledCircuit.md) | +| `backend`? | `Backend` | + +#### Returns + +[`Noir`](Noir.md) + +## Methods + +### destroy() + +```ts +destroy(): Promise +``` + +#### Returns + +`Promise`\<`void`\> + +#### Description + +Destroys the underlying backend instance. + +#### Example + +```typescript +await noir.destroy(); +``` + +*** + +### execute() + +```ts +execute(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<`object`\> + +#### Description + +Allows to execute a circuit to get its witness and return value. + +#### Example + +```typescript +async execute(inputs) +``` + +*** + +### generateFinalProof() + +```ts +generateFinalProof(inputs, foreignCallHandler?): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `inputs` | [`InputMap`](../type-aliases/InputMap.md) | +| `foreignCallHandler`? | [`ForeignCallHandler`](../type-aliases/ForeignCallHandler.md) | + +#### Returns + +`Promise`\<[`ProofData`](../type-aliases/ProofData.md)\> + +#### Description + +Generates a witness and a proof given an object as input. 
+ +#### Example + +```typescript +async generateFinalProof(input) +``` + +*** + +### verifyFinalProof() + +```ts +verifyFinalProof(proofData): Promise +``` + +#### Parameters + +| Parameter | Type | +| :------ | :------ | +| `proofData` | [`ProofData`](../type-aliases/ProofData.md) | + +#### Returns + +`Promise`\<`boolean`\> + +#### Description + +Instantiates the verification key and verifies a proof. + +#### Example + +```typescript +async verifyFinalProof(proof) +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md new file mode 100644 index 00000000000..c783283e396 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/and.md @@ -0,0 +1,22 @@ +# and() + +```ts +and(lhs, rhs): string +``` + +Performs a bitwise AND operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md new file mode 100644 index 00000000000..7882d0da8d5 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/blake2s256.md @@ -0,0 +1,21 @@ +# blake2s256() + +```ts +blake2s256(inputs): Uint8Array +``` + +Calculates the Blake2s256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + 
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md
new file mode 100644
index 00000000000..0ba5783f0d5
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify.md
@@ -0,0 +1,28 @@
+# ecdsa\_secp256k1\_verify()
+
+```ts
+ecdsa_secp256k1_verify(
+   hashed_msg,
+   public_key_x_bytes,
+   public_key_y_bytes,
+   signature): boolean
+```
+
+Verifies an ECDSA signature over the secp256k1 curve.
+
+## Parameters
+
+| Parameter | Type | Description |
+| :------ | :------ | :------ |
+| `hashed_msg` | `Uint8Array` | |
+| `public_key_x_bytes` | `Uint8Array` | |
+| `public_key_y_bytes` | `Uint8Array` | |
+| `signature` | `Uint8Array` | |
+
+## Returns
+
+`boolean`
+
+***
+
+Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/)
diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md
new file mode 100644
index 00000000000..0b20ff68957
--- /dev/null
+++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify.md
@@ -0,0 +1,28 @@
+# ecdsa\_secp256r1\_verify()
+
+```ts
+ecdsa_secp256r1_verify(
+   hashed_msg,
+   public_key_x_bytes,
+   public_key_y_bytes,
+   signature): boolean
+```
+
+Verifies an ECDSA signature over the secp256r1 curve.
+ +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `hashed_msg` | `Uint8Array` | | +| `public_key_x_bytes` | `Uint8Array` | | +| `public_key_y_bytes` | `Uint8Array` | | +| `signature` | `Uint8Array` | | + +## Returns + +`boolean` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md new file mode 100644 index 00000000000..d10f155ce86 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/keccak256.md @@ -0,0 +1,21 @@ +# keccak256() + +```ts +keccak256(inputs): Uint8Array +``` + +Calculates the Keccak256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md new file mode 100644 index 00000000000..6ba4ecac022 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/sha256.md @@ -0,0 +1,21 @@ +# sha256() + +```ts +sha256(inputs): Uint8Array +``` + +Calculates the SHA256 hash of the input bytes + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `inputs` | `Uint8Array` | | + +## Returns + +`Uint8Array` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git 
a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md new file mode 100644 index 00000000000..8d762b895d3 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/functions/xor.md @@ -0,0 +1,22 @@ +# xor() + +```ts +xor(lhs, rhs): string +``` + +Performs a bitwise XOR operation between `lhs` and `rhs` + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `lhs` | `string` | | +| `rhs` | `string` | | + +## Returns + +`string` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md new file mode 100644 index 00000000000..8b9e35bc9a1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/index.md @@ -0,0 +1,37 @@ +# noir_js + +## Exports + +### Classes + +| Class | Description | +| :------ | :------ | +| [Noir](classes/Noir.md) | - | + +### Type Aliases + +| Type alias | Description | +| :------ | :------ | +| [CompiledCircuit](type-aliases/CompiledCircuit.md) | - | +| [ForeignCallHandler](type-aliases/ForeignCallHandler.md) | A callback which performs an foreign call and returns the response. 
| +| [ForeignCallInput](type-aliases/ForeignCallInput.md) | - | +| [ForeignCallOutput](type-aliases/ForeignCallOutput.md) | - | +| [InputMap](type-aliases/InputMap.md) | - | +| [ProofData](type-aliases/ProofData.md) | - | +| [WitnessMap](type-aliases/WitnessMap.md) | - | + +### Functions + +| Function | Description | +| :------ | :------ | +| [and](functions/and.md) | Performs a bitwise AND operation between `lhs` and `rhs` | +| [blake2s256](functions/blake2s256.md) | Calculates the Blake2s256 hash of the input bytes | +| [ecdsa\_secp256k1\_verify](functions/ecdsa_secp256k1_verify.md) | Calculates the Blake2s256 hash of the input bytes and represents these as a single field element. | +| [ecdsa\_secp256r1\_verify](functions/ecdsa_secp256r1_verify.md) | Verifies a ECDSA signature over the secp256r1 curve. | +| [keccak256](functions/keccak256.md) | Calculates the Keccak256 hash of the input bytes | +| [sha256](functions/sha256.md) | Calculates the SHA256 hash of the input bytes | +| [xor](functions/xor.md) | Performs a bitwise XOR operation between `lhs` and `rhs` | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md new file mode 100644 index 00000000000..34e0dd04205 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/CompiledCircuit.md @@ -0,0 +1,20 @@ +# CompiledCircuit + +```ts +type CompiledCircuit: object; +``` + +## Description + +The representation of a compiled circuit + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `abi` | `Abi` | **Description**

ABI representation of the circuit | +| `bytecode` | `string` | **Description**

The bytecode of the circuit | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md new file mode 100644 index 00000000000..812b8b16481 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallHandler.md @@ -0,0 +1,24 @@ +# ForeignCallHandler + +```ts +type ForeignCallHandler: (name, inputs) => Promise; +``` + +A callback which performs an foreign call and returns the response. + +## Parameters + +| Parameter | Type | Description | +| :------ | :------ | :------ | +| `name` | `string` | The identifier for the type of foreign call being performed. | +| `inputs` | [`ForeignCallInput`](ForeignCallInput.md)[] | An array of hex encoded inputs to the foreign call. | + +## Returns + +`Promise`\<[`ForeignCallOutput`](ForeignCallOutput.md)[]\> + +outputs - An array of hex encoded outputs containing the results of the foreign call. 
+ +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md new file mode 100644 index 00000000000..dd95809186a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallInput.md @@ -0,0 +1,9 @@ +# ForeignCallInput + +```ts +type ForeignCallInput: string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md new file mode 100644 index 00000000000..b71fb78a946 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ForeignCallOutput.md @@ -0,0 +1,9 @@ +# ForeignCallOutput + +```ts +type ForeignCallOutput: string | string[]; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md new file mode 100644 index 00000000000..c714e999d93 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/InputMap.md @@ -0,0 +1,13 @@ +# InputMap + +```ts +type InputMap: object; +``` + +## Index signature + + \[`key`: `string`\]: `InputValue` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git 
a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md new file mode 100644 index 00000000000..3eb360a78f1 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/ProofData.md @@ -0,0 +1,20 @@ +# ProofData + +```ts +type ProofData: object; +``` + +## Description + +The representation of a proof + +## Type declaration + +| Member | Type | Description | +| :------ | :------ | :------ | +| `proof` | `Uint8Array` | **Description**

A byte array representing the proof |
+| `publicInputs` | `WitnessMap` | **Description**

Public inputs of a proof | + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md new file mode 100644 index 00000000000..258c46f9d0c --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/type-aliases/WitnessMap.md @@ -0,0 +1,9 @@ +# WitnessMap + +```ts +type WitnessMap: Map; +``` + +*** + +Generated using [typedoc-plugin-markdown](https://www.npmjs.com/package/typedoc-plugin-markdown) and [TypeDoc](https://typedoc.org/) diff --git a/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs new file mode 100644 index 00000000000..fe2629ddc9f --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/NoirJS/noir_js/typedoc-sidebar.cjs @@ -0,0 +1,4 @@ +// @ts-check +/** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ +const typedocSidebar = { items: [{"type":"category","label":"Classes","items":[{"type":"doc","id":"reference/NoirJS/noir_js/classes/Noir","label":"Noir"}]},{"type":"category","label":"Type 
Aliases","items":[{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/CompiledCircuit","label":"CompiledCircuit"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallHandler","label":"ForeignCallHandler"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallInput","label":"ForeignCallInput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ForeignCallOutput","label":"ForeignCallOutput"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/InputMap","label":"InputMap"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/ProofData","label":"ProofData"},{"type":"doc","id":"reference/NoirJS/noir_js/type-aliases/WitnessMap","label":"WitnessMap"}]},{"type":"category","label":"Functions","items":[{"type":"doc","id":"reference/NoirJS/noir_js/functions/and","label":"and"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/blake2s256","label":"blake2s256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256k1_verify","label":"ecdsa_secp256k1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/ecdsa_secp256r1_verify","label":"ecdsa_secp256r1_verify"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/keccak256","label":"keccak256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/sha256","label":"sha256"},{"type":"doc","id":"reference/NoirJS/noir_js/functions/xor","label":"xor"}]}]}; +module.exports = typedocSidebar.items; \ No newline at end of file diff --git a/docs/versioned_docs/version-v0.22.0/reference/_category_.json b/docs/versioned_docs/version-v0.22.0/reference/_category_.json new file mode 100644 index 00000000000..5b6a20a609a --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/_category_.json @@ -0,0 +1,5 @@ +{ + "position": 4, + "collapsible": true, + "collapsed": true +} diff --git a/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md b/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md new file mode 100644 index 
00000000000..ff3dee8973f --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/reference/nargo_commands.md @@ -0,0 +1,250 @@ +--- +title: Nargo +description: + Noir CLI Commands for Noir Prover and Verifier to create, execute, prove and verify programs, + generate Solidity verifier smart contract and compile into JSON file containing ACIR + representation and ABI of circuit. +keywords: + [ + Nargo, + Noir CLI, + Noir Prover, + Noir Verifier, + generate Solidity verifier, + compile JSON file, + ACIR representation, + ABI of circuit, + TypeScript, + ] +sidebar_position: 0 +--- + +## General options + +| Option | Description | +| -------------------- | -------------------------------------------------- | +| `--show-ssa` | Emit debug information for the intermediate SSA IR | +| `--deny-warnings` | Quit execution when warnings are emitted | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo help [subcommand]` + +Prints the list of available commands or specific information of a subcommand. + +_Arguments_ + +| Argument | Description | +| -------------- | -------------------------------------------- | +| `` | The subcommand whose help message to display | + +## `nargo backend` + +Installs and selects custom backends used to generate and verify proofs. 
+ +### Commands + +| Command | Description | +| ----------- | --------------------------------------------------------- | +| `current` | Prints the name of the currently active backend | +| `ls` | Prints the list of currently installed backends | +| `use` | Select the backend to use | +| `install` | Install a new backend from a URL | +| `uninstall` | Uninstalls a backend | +| `help` | Print this message or the help of the given subcommand(s) | + +### Options + +| Option | Description | +| ------------ | ----------- | +| `-h, --help` | Print help | + +## `nargo check` + +Generate the `Prover.toml` and `Verifier.toml` files for specifying prover and verifier in/output +values of the Noir program respectively. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to check | +| `--workspace` | Check all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +### `nargo codegen-verifier` + +Generate a Solidity verifier smart contract for the program. + +### Options + +| Option | Description | +| --------------------- | ------------------------------------- | +| `--package ` | The name of the package to codegen | +| `--workspace` | Codegen all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo compile` + +Compile the program into a JSON build artifact file containing the ACIR representation and the ABI +of the circuit. This build artifact can then be used to generate and verify proofs. + +You can also use "build" as an alias for compile (e.g. `nargo build`). 
+ +### Options + +| Option | Description | +| --------------------- | ------------------------------------------------------------ | +| `--package ` | The name of the package to compile | +| `--workspace` | Compile all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo new ` + +Creates a new Noir project in a new folder. + +**Arguments** + +| Argument | Description | +| -------- | -------------------------------- | +| `` | The path to save the new project | + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: package directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo init` + +Creates a new Noir project in the current directory. + +### Options + +| Option | Description | +| --------------- | ----------------------------------------------------- | +| `--name ` | Name of the package [default: current directory name] | +| `--lib` | Use a library template | +| `--bin` | Use a binary template [default] | +| `--contract` | Use a contract template | +| `-h, --help` | Print help | + +## `nargo execute [WITNESS_NAME]` + +Runs the Noir program and prints its return value. 
+ +**Arguments** + +| Argument | Description | +| ---------------- | ----------------------------------------- | +| `[WITNESS_NAME]` | Write the execution witness to named file | + +### Options + +| Option | Description | +| --------------------------------- | ------------------------------------------------------------------------------------ | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `--package ` | The name of the package to execute | +| `--workspace` | Execute all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +_Usage_ + +The inputs to the circuit are read from the `Prover.toml` file generated by `nargo check`, which +must be filled in. + +To save the witness to file, run the command with a value for the `WITNESS_NAME` argument. A +`.tr` file will then be saved in the `./target` folder. + +## `nargo prove` + +Creates a proof for the program. + +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-p, --prover-name ` | The name of the toml file which contains the inputs for the prover [default: Prover] | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--verify` | Verify proof after proving | +| `--package ` | The name of the package to prove | +| `--workspace` | Prove all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo verify` + +Given a proof and a program, verify whether the proof is valid. 
+ +### Options + +| Option | Description | +| ------------------------------------- | ---------------------------------------------------------------------------------------- | +| `-v, --verifier-name ` | The name of the toml file which contains the inputs for the verifier [default: Verifier] | +| `--package ` | The name of the package to verify | +| `--workspace` | Verify all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo test [TEST_NAME]` + +Nargo will automatically compile and run any functions which have the decorator `#[test]` on them if +you run `nargo test`. To print `println` statements in tests, use the `--show-output` flag. + +Takes an optional `--exact` flag which allows you to select tests based on an exact name. + +See an example on the [testing page](../getting_started/tooling/testing.md). + +### Options + +| Option | Description | +| --------------------- | -------------------------------------- | +| `--show-output` | Display output of `println` statements | +| `--exact` | Only run tests that match exactly | +| `--package ` | The name of the package to test | +| `--workspace` | Test all packages in the workspace | +| `--print-acir` | Display the ACIR for compiled circuit | +| `--deny-warnings` | Treat all warnings as errors | +| `--silence-warnings` | Suppress warnings | +| `-h, --help` | Print help | + +## `nargo info` + +Prints a table containing the information of the package. + +Currently the table provide + +1. The number of ACIR opcodes +2. The final number gates in the circuit used by a backend + +If the file contains a contract the table will provide the +above information about each function of the contract. + +## `nargo lsp` + +Start a long-running Language Server process that communicates over stdin/stdout. 
+Usually this command is not run by a user, but instead will be run by a Language Client, such as [vscode-noir](https://github.com/noir-lang/vscode-noir). + +## `nargo fmt` + +Automatically formats your Noir source code based on the default formatting settings. diff --git a/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md b/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md new file mode 100644 index 00000000000..0763b6224c9 --- /dev/null +++ b/docs/versioned_docs/version-v0.22.0/tutorials/noirjs_app.md @@ -0,0 +1,261 @@ +--- +title: Tiny NoirJS app +description: Learn how to setup a new app that uses Noir to generate and verify zero-knowledge SNARK proofs in a typescript or javascript environment +keywords: [how to, guide, javascript, typescript, noir, barretenberg, zero-knowledge, proofs] +sidebar_position: 0 +--- + +NoirJS works both on the browser and on the server, and works for both ESM and CJS module systems. In this page, we will learn how can we write a simple test and a simple web app to verify the standard Noir example. + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Before we start + +:::note + +Feel free to use whatever versions, just keep in mind that Nargo and the NoirJS packages are meant to be in sync. For example, Nargo 0.18.x matches `noir_js@0.18.x`, etc. + +In this guide, we will be pinned to 0.17.0. + +::: + +Make sure you have Node installed on your machine by opening a terminal and executing `node --version`. If you don't see a version, you should install [node](https://github.com/nvm-sh/nvm). You can also use `yarn` if you prefer that package manager over npm (which comes with node). + +First of all, follow the the [Nargo guide](../getting_started/installation/index.md) to install nargo version 0.17.0 and create a new project with `nargo new circuit`. Once there, `cd` into the `circuit` folder. 
You should then be able to compile your circuit into `json` format and see it inside the `target` folder: + +```bash +nargo compile +``` + +Your folder structure should look like: + +```tree +. +└── circuit + ├── Nargo.toml + ├── src + │ └── main.nr + └── target + └── circuit.json +``` + +## Starting a new project + +Go back to the previous folder and start a new project by running run `npm init`. You can configure your project or just leave the defaults, and see a `package.json` appear in your root folder. + +## Installing dependencies + +We'll need two `npm` packages. These packages will provide us the methods we need to run and verify proofs: + +```bash +npm i @noir-lang/backend_barretenberg@^0.17.0 @noir-lang/noir_js@^0.17.0 +``` + +To serve our page, we can use a build tool such as `vite`. Because we're gonna use some `wasm` files, we need to install a plugin as well. Run: + +```bash +npm i --save-dev vite rollup-plugin-copy +``` + +Since we're on the dependency world, we may as well define a nice starting script. Vite makes it easy. Just open `package.json`, find the block "scripts" and add this just below the line with `"test" : "echo......."`: + +```json + "start": "vite --open" +``` + +If you want do build a static website, you can also add some build and preview scripts: + +```json + "build": "vite build", + "preview": "vite preview" +``` + +## Vite plugins + +Vite is great, but support from `wasm` doesn't work out-of-the-box. We're gonna write a quick plugin and use another one. Just copy and paste this into a file named `vite.config.js`. You don't need to understand it, just trust me bro. 
+ +```js +import { defineConfig } from 'vite'; +import copy from 'rollup-plugin-copy'; +import fs from 'fs'; +import path from 'path'; + +const wasmContentTypePlugin = { + name: 'wasm-content-type-plugin', + configureServer(server) { + server.middlewares.use(async (req, res, next) => { + if (req.url.endsWith('.wasm')) { + res.setHeader('Content-Type', 'application/wasm'); + const newPath = req.url.replace('deps', 'dist'); + const targetPath = path.join(__dirname, newPath); + const wasmContent = fs.readFileSync(targetPath); + return res.end(wasmContent); + } + next(); + }); + }, +}; + +export default defineConfig(({ command }) => { + if (command === 'serve') { + return { + plugins: [ + copy({ + targets: [{ src: 'node_modules/**/*.wasm', dest: 'node_modules/.vite/dist' }], + copySync: true, + hook: 'buildStart', + }), + command === 'serve' ? wasmContentTypePlugin : [], + ], + }; + } + + return {}; +}); +``` + +## HTML + +Here's the simplest HTML with some terrible UI. Create a file called `index.html` and paste this: + +```html + + + + + + +

Very basic Noir app

+
+

Logs

+

Proof

+
+ + +``` + +## Some good old vanilla Javascript + +Create a new file `app.js`, which is where our javascript code will live. Let's start with this code inside: + +```js +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); + +function display(container, msg) { + const c = document.getElementById(container); + const p = document.createElement('p'); + p.textContent = msg; + c.appendChild(p); +} +``` + +We can manipulate our website with this little function, so we can see our website working. + +## Adding Noir + +If you come from the previous page, your folder structure should look like this: + +```tree +├── app.js +├── circuit +│ ├── Nargo.toml +│ ├── src +│ │ └── main.nr +│ └── target +│ └── circuit.json +├── index.html +├── package.json +└── vite.config.js +``` + +You'll see other files and folders showing up (like `package-lock.json`, `yarn.lock`, `node_modules`) but you shouldn't have to care about those. + +## Importing our dependencies + +We're starting with the good stuff now. At the top of the new javascript file, import the packages: + +```ts +import { BarretenbergBackend } from '@noir-lang/backend_barretenberg'; +import { Noir } from '@noir-lang/noir_js'; +``` + +We also need to import the `circuit` JSON file we created. If you have the suggested folder structure, you can add this line: + +```ts +import circuit from './circuit/target/circuit.json'; +``` + +## Write code + +:::note + +We're gonna be adding code inside the `document.addEventListener...etc` block: + +```js +// forget stuff here +document.addEventListener('DOMContentLoaded', async () => { + // here's where the magic happens +}); +// forget stuff here +``` + +::: + +Our dependencies exported two classes: `BarretenbergBackend` and `Noir`. Let's `init` them and add some logs, just to flex: + +```ts +const backend = new BarretenbergBackend(circuit); +const noir = new Noir(circuit, backend); +``` + +## Proving + +Now we're ready to prove stuff! 
Let's feed some inputs to our circuit and calculate the proof: + +```js +const input = { x: 1, y: 2 }; +display('logs', 'Generating proof... ⌛'); +const proof = await noir.generateFinalProof(input); +display('logs', 'Generating proof... ✅'); +display('results', proof.proof); +``` + +You're probably eager to see stuff happening, so go and run your app now! + +From your terminal, run `npm start` (or `yarn start`). If it doesn't open a browser for you, just visit `localhost:5173`. On a modern laptop, proof will generate in less than 100ms, and you'll see this: + +![Getting Started 0](@site/static/img/noir_getting_started_1.png) + +If you're human, you shouldn't be able to understand anything on the "proof" box. That's OK. We like you, human. + +In any case, this means your proof was generated! But you shouldn't trust me just yet. Add these lines to see it being verified: + +```js +display('logs', 'Verifying proof... ⌛'); +const verification = await noir.verifyFinalProof(proof); +if (verification) display('logs', 'Verifying proof... ✅'); +``` + +By saving, your app will refresh and here's our complete Tiny Noir App! + +You can find the complete app code for this guide [here](https://github.com/noir-lang/tiny-noirjs-app). + +## Further Reading + +You can see how noirjs is used in a full stack Next.js hardhat application in the [noir-starter repo here](https://github.com/noir-lang/noir-starter/tree/main/vite-hardhat). The example shows how to calculate a proof in the browser and verify it with a deployed Solidity verifier contract from noirjs. + +You should also check out the more advanced examples in the [noir-examples repo](https://github.com/noir-lang/noir-examples), where you'll find reference usage for some cool apps. 
diff --git a/docs/versioned_sidebars/version-v0.22.0-sidebars.json b/docs/versioned_sidebars/version-v0.22.0-sidebars.json new file mode 100644 index 00000000000..b16f79cc176 --- /dev/null +++ b/docs/versioned_sidebars/version-v0.22.0-sidebars.json @@ -0,0 +1,83 @@ +{ + "sidebar": [ + { + "type": "doc", + "id": "index" + }, + { + "type": "category", + "label": "Getting Started", + "items": [ + { + "type": "autogenerated", + "dirName": "getting_started" + } + ] + }, + { + "type": "category", + "label": "The Noir Language", + "items": [ + { + "type": "autogenerated", + "dirName": "noir" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "category", + "label": "How To Guides", + "items": [ + { + "type": "autogenerated", + "dirName": "how_to" + } + ] + }, + { + "type": "category", + "label": "Explainers", + "items": [ + { + "type": "autogenerated", + "dirName": "explainers" + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "autogenerated", + "dirName": "tutorials" + } + ] + }, + { + "type": "category", + "label": "Reference", + "items": [ + { + "type": "autogenerated", + "dirName": "reference" + } + ] + }, + { + "type": "html", + "value": "
", + "defaultStyle": true + }, + { + "type": "doc", + "id": "migration_notes", + "label": "Migration notes" + } + ] +} diff --git a/docs/versions.json b/docs/versions.json deleted file mode 100644 index 7e140c94b73..00000000000 --- a/docs/versions.json +++ /dev/null @@ -1,4 +0,0 @@ -[ - "v0.19.4", - "v0.17.0" -] diff --git a/flake.nix b/flake.nix index 0ec712aa082..6849dc0a0ad 100644 --- a/flake.nix +++ b/flake.nix @@ -73,7 +73,7 @@ # Configuration shared between builds config = { # x-release-please-start-version - version = "0.20.0"; + version = "0.23.0"; # x-release-please-end src = pkgs.lib.cleanSourceWith { @@ -118,9 +118,6 @@ native-cargo-artifacts = craneLib.buildDepsOnly (nativeConfig // { pname = "nargo"; }); - noir-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { - pname = "noir_wasm"; - }); noirc-abi-wasm-cargo-artifacts = craneLib.buildDepsOnly (wasmConfig // { pname = "noirc_abi_wasm"; }); @@ -139,25 +136,6 @@ doCheck = false; }); - noir_wasm = craneLib.buildPackage (wasmConfig // { - pname = "noir_wasm"; - - inherit GIT_COMMIT GIT_DIRTY; - - cargoArtifacts = noir-wasm-cargo-artifacts; - - buildPhaseCargoCommand = '' - bash compiler/wasm/buildPhaseCargoCommand.sh release - ''; - - installPhase = '' - bash compiler/wasm/installPhase.sh - ''; - - # We don't want to run tests because they don't work in the Nix sandbox - doCheck = false; - }); - noirc_abi_wasm = craneLib.buildPackage (wasmConfig // rec { pname = "noirc_abi_wasm"; @@ -232,18 +210,16 @@ # Nix flakes cannot build more than one derivation in one command (see https://github.com/NixOS/nix/issues/5591) # so we use `symlinkJoin` to build everything as the "all" package. 
- all = pkgs.symlinkJoin { name = "all"; paths = [ nargo noir_wasm noirc_abi_wasm acvm_js ]; }; - all_wasm = pkgs.symlinkJoin { name = "all_wasm"; paths = [ noir_wasm noirc_abi_wasm acvm_js ]; }; + all = pkgs.symlinkJoin { name = "all"; paths = [ nargo noirc_abi_wasm acvm_js ]; }; + all_wasm = pkgs.symlinkJoin { name = "all_wasm"; paths = [ noirc_abi_wasm acvm_js ]; }; # We also export individual packages to enable `nix build .#nargo -L`, etc. inherit nargo; - inherit noir_wasm; inherit noirc_abi_wasm; inherit acvm_js; # We expose the `*-cargo-artifacts` derivations so we can cache our cargo dependencies in CI inherit native-cargo-artifacts; - inherit noir-wasm-cargo-artifacts; inherit noirc-abi-wasm-cargo-artifacts; inherit acvm-js-cargo-artifacts; }; @@ -253,7 +229,6 @@ devShells.default = pkgs.mkShell (environment // { inputsFrom = [ nargo - noir_wasm noirc_abi_wasm acvm_js ]; diff --git a/noir_stdlib/src/cmp.nr b/noir_stdlib/src/cmp.nr new file mode 100644 index 00000000000..11127494c18 --- /dev/null +++ b/noir_stdlib/src/cmp.nr @@ -0,0 +1,310 @@ +trait Eq { + fn eq(self, other: Self) -> bool; +} + +impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } + +impl Eq for u1 { fn eq(self, other: u1) -> bool { self == other } } +impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } +impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } +impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } +impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } + +impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } +impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } +impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } +impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } + +impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } +impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } + +impl Eq for [T; N] 
where T: Eq { + fn eq(self, other: [T; N]) -> bool { + let mut result = true; + for i in 0 .. self.len() { + result &= self[i].eq(other[i]); + } + result + } +} + +impl Eq for str { + fn eq(self, other: str) -> bool { + let self_bytes = self.as_bytes(); + let other_bytes = other.as_bytes(); + self_bytes == other_bytes + } +} + +impl Eq for (A, B) where A: Eq, B: Eq { + fn eq(self, other: (A, B)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) + } +} + +impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { + fn eq(self, other: (A, B, C)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) + } +} + +impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { + fn eq(self, other: (A, B, C, D)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) + } +} + +impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { + fn eq(self, other: (A, B, C, D, E)) -> bool { + self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) + } +} + +impl Eq for Ordering { + fn eq(self, other: Ordering) -> bool { + self.result == other.result + } +} + + +// Noir doesn't have enums yet so we emulate (Lt | Eq | Gt) with a struct +// that has 3 public functions for constructing the struct. +struct Ordering { + result: Field, +} + +impl Ordering { + // Implementation note: 0, 1, and 2 for Lt, Eq, and Gt are built + // into the compiler, do not change these without also updating + // the compiler itself! 
+ pub fn less() -> Ordering { + Ordering { result: 0 } + } + + pub fn equal() -> Ordering { + Ordering { result: 1 } + } + + pub fn greater() -> Ordering { + Ordering { result: 2 } + } +} + + +trait Ord { + fn cmp(self, other: Self) -> Ordering; +} + +// Note: Field deliberately does not implement Ord + +impl Ord for u8 { + fn cmp(self, other: u8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u16 { + fn cmp(self, other: u16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u32 { + fn cmp(self, other: u32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for u64 { + fn cmp(self, other: u64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i8 { + fn cmp(self, other: i8) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i16 { + fn cmp(self, other: i16) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i32 { + fn cmp(self, other: i32) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for i64 { + fn cmp(self, other: i64) -> Ordering { + if self < other { + Ordering::less() + } else if self > other { + Ordering::greater() + } else { + Ordering::equal() + } + } +} + +impl Ord for () { + fn cmp(_self: Self, _other: ()) -> Ordering { + Ordering::equal() + } +} + +impl Ord for bool { + fn cmp(self, other: bool) -> 
Ordering { + if self { + if other { + Ordering::equal() + } else { + Ordering::greater() + } + } else { + if other { + Ordering::less() + } else { + Ordering::equal() + } + } + } +} + +impl Ord for [T; N] where T: Ord { + // The first non-equal element of both arrays determines + // the ordering for the whole array. + fn cmp(self, other: [T; N]) -> Ordering { + let mut result = Ordering::equal(); + for i in 0 .. self.len() { + if result == Ordering::equal() { + let result_i = self[i].cmp(other[i]); + + if result_i == Ordering::less() { + result = result_i; + } else if result_i == Ordering::greater() { + result = result_i; + } + } + } + result + } +} + +impl Ord for (A, B) where A: Ord, B: Ord { + fn cmp(self, other: (A, B)) -> Ordering { + let result = self.0.cmp(other.0); + + if result != Ordering::equal() { + result + } else { + self.1.cmp(other.1) + } + } +} + +impl Ord for (A, B, C) where A: Ord, B: Ord, C: Ord { + fn cmp(self, other: (A, B, C)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + result + } +} + +impl Ord for (A, B, C, D) where A: Ord, B: Ord, C: Ord, D: Ord { + fn cmp(self, other: (A, B, C, D)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + result + } +} + +impl Ord for (A, B, C, D, E) where A: Ord, B: Ord, C: Ord, D: Ord, E: Ord { + fn cmp(self, other: (A, B, C, D, E)) -> Ordering { + let mut result = self.0.cmp(other.0); + + if result == Ordering::equal() { + result = self.1.cmp(other.1); + } + + if result == Ordering::equal() { + result = self.2.cmp(other.2); + } + + if result == Ordering::equal() { + result = self.3.cmp(other.3); + } + + if result == 
Ordering::equal() { + result = self.4.cmp(other.4); + } + + result + } +} diff --git a/noir_stdlib/src/ec/montcurve.nr b/noir_stdlib/src/ec/montcurve.nr index 82d22837b46..83a17bae322 100644 --- a/noir_stdlib/src/ec/montcurve.nr +++ b/noir_stdlib/src/ec/montcurve.nr @@ -12,6 +12,8 @@ mod affine { use crate::ec::safe_inverse; use crate::ec::sqrt; use crate::ec::ZETA; + use crate::cmp::Eq; + // Curve specification struct Curve { // Montgomery Curve configuration (ky^2 = x^3 + j*x^2 + x) j: Field, @@ -32,11 +34,6 @@ mod affine { Self {x, y, infty: false} } - // Check for equality - fn eq(self, p: Self) -> bool { - (self.infty & p.infty) | (!self.infty & !p.infty & (self.x == p.x) & (self.y == p.y)) - } - // Check if zero pub fn is_zero(self) -> bool { self.infty @@ -76,6 +73,12 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + (self.infty & p.infty) | (!self.infty & !p.infty & (self.x == p.x) & (self.y == p.y)) + } + } + impl Curve { // Curve constructor pub fn new(j: Field, k: Field, gen: Point) -> Self { @@ -219,6 +222,7 @@ mod curvegroup { use crate::ec::swcurve::curvegroup::Point as SWPoint; use crate::ec::tecurve::curvegroup::Curve as TECurve; use crate::ec::tecurve::curvegroup::Point as TEPoint; + use crate::cmp::Eq; struct Curve { // Montgomery Curve configuration (ky^2 z = x*(x^2 + j*x*z + z*z)) j: Field, @@ -239,11 +243,6 @@ mod curvegroup { Self {x, y, z} } - // Check for equality - fn eq(self, p: Self) -> bool { - (self.z == p.z) | (((self.x * self.z) == (p.x * p.z)) & ((self.y * self.z) == (p.y * p.z))) - } - // Check if zero pub fn is_zero(self) -> bool { self.z == 0 @@ -277,6 +276,12 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + (self.z == p.z) | (((self.x * self.z) == (p.x * p.z)) & ((self.y * self.z) == (p.y * p.z))) + } + } + impl Curve { // Curve constructor pub fn new(j: Field, k: Field, gen: Point) -> Self { diff --git a/noir_stdlib/src/ec/swcurve.nr b/noir_stdlib/src/ec/swcurve.nr index 
e9b6f661843..e64f5a7be02 100644 --- a/noir_stdlib/src/ec/swcurve.nr +++ b/noir_stdlib/src/ec/swcurve.nr @@ -7,6 +7,8 @@ mod affine { use crate::ec::safe_inverse; use crate::ec::is_square; use crate::ec::sqrt; + use crate::cmp::Eq; + // Curve specification struct Curve { // Short Weierstraß curve // Coefficients in defining equation y^2 = x^3 + ax + b @@ -28,15 +30,6 @@ mod affine { Self {x, y, infty: false} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, infty: inf1} = self; - let Self {x: x2, y: y2, infty: inf2} = p; - - (inf1 & inf2) - | (!inf1 & !inf2 & (x1 == x2) & (y1 == y2)) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -65,6 +58,16 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, infty: inf1} = self; + let Self {x: x2, y: y2, infty: inf2} = p; + + (inf1 & inf2) + | (!inf1 & !inf2 & (x1 == x2) & (y1 == y2)) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { @@ -182,6 +185,8 @@ mod curvegroup { // Points are represented by three-dimensional Jacobian coordinates. // See for details. 
use crate::ec::swcurve::affine; + use crate::cmp::Eq; + // Curve specification struct Curve { // Short Weierstraß curve // Coefficients in defining equation y^2 = x^3 + axz^4 + bz^6 @@ -203,14 +208,6 @@ mod curvegroup { Self {x, y, z} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, z: z1} = self; - let Self {x: x2, y: y2, z: z2} = p; - - ((z1 == 0) & (z2 == 0)) | ((z1 != 0) & (z2 != 0) & (x1*z2*z2 == x2*z1*z1) & (y1*z2*z2*z2 == y2*z1*z1*z1)) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -240,6 +237,15 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, z: z1} = self; + let Self {x: x2, y: y2, z: z2} = p; + + ((z1 == 0) & (z2 == 0)) | ((z1 != 0) & (z2 != 0) & (x1*z2*z2 == x2*z1*z1) & (y1*z2*z2*z2 == y2*z1*z1*z1)) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, b: Field, gen: Point) -> Curve { diff --git a/noir_stdlib/src/ec/tecurve.nr b/noir_stdlib/src/ec/tecurve.nr index 849b45ff012..5333ece4c4a 100644 --- a/noir_stdlib/src/ec/tecurve.nr +++ b/noir_stdlib/src/ec/tecurve.nr @@ -9,6 +9,8 @@ mod affine { use crate::ec::montcurve::affine::Point as MPoint; use crate::ec::swcurve::affine::Curve as SWCurve; use crate::ec::swcurve::affine::Point as SWPoint; + use crate::cmp::Eq; + // Curve specification struct Curve { // Twisted Edwards curve // Coefficients in defining equation ax^2 + y^2 = 1 + dx^2y^2 @@ -29,14 +31,6 @@ mod affine { Self { x, y } } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1} = self; - let Self {x: x2, y: y2} = p; - - (x1 == x2) & (y1 == y2) - } - // Check if zero pub fn is_zero(self) -> bool { self.eq(Point::zero()) @@ -74,6 +68,15 @@ mod affine { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1} = self; + let Self {x: x2, y: y2} = p; + + (x1 == x2) & (y1 == y2) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, d: Field, gen: 
Point) -> Curve { @@ -198,6 +201,8 @@ mod curvegroup { use crate::ec::montcurve::curvegroup::Point as MPoint; use crate::ec::swcurve::curvegroup::Curve as SWCurve; use crate::ec::swcurve::curvegroup::Point as SWPoint; + use crate::cmp::Eq; + // Curve specification struct Curve { // Twisted Edwards curve // Coefficients in defining equation a(x^2 + y^2)z^2 = z^4 + dx^2y^2 @@ -220,14 +225,6 @@ mod curvegroup { Self {x, y, t, z} } - // Check for equality - fn eq(self, p: Point) -> bool { - let Self {x: x1, y: y1, t: _t1, z: z1} = self; - let Self {x: x2, y: y2, t: _t2, z:z2} = p; - - (x1*z2 == x2*z1) & (y1*z2 == y2*z1) - } - // Check if zero pub fn is_zero(self) -> bool { let Self {x, y, t, z} = self; @@ -259,6 +256,15 @@ mod curvegroup { } } + impl Eq for Point { + fn eq(self, p: Self) -> bool { + let Self {x: x1, y: y1, t: _t1, z: z1} = self; + let Self {x: x2, y: y2, t: _t2, z:z2} = p; + + (x1*z2 == x2*z1) & (y1*z2 == y2*z1) + } + } + impl Curve { // Curve constructor pub fn new(a: Field, d: Field, gen: Point) -> Curve { diff --git a/noir_stdlib/src/eddsa.nr b/noir_stdlib/src/eddsa.nr index 39051e23233..657e791e9c7 100644 --- a/noir_stdlib/src/eddsa.nr +++ b/noir_stdlib/src/eddsa.nr @@ -1,25 +1,7 @@ use crate::hash::poseidon; use crate::ec::consts::te::baby_jubjub; use crate::ec::tecurve::affine::Point as TEPoint; -// Returns true if x is less than y -fn lt_bytes32(x: Field, y: Field) -> bool { - let x_bytes = x.to_le_bytes(32); - let y_bytes = y.to_le_bytes(32); - let mut x_is_lt = false; - let mut done = false; - for i in 0..32 { - if (!done) { - let x_byte = x_bytes[31 - i] as u8; - let y_byte = y_bytes[31 - i] as u8; - let bytes_match = x_byte == y_byte; - if !bytes_match { - x_is_lt = x_byte < y_byte; - done = true; - } - } - } - x_is_lt -} + // Returns true if signature is valid pub fn eddsa_poseidon_verify( pub_key_x: Field, @@ -39,7 +21,7 @@ pub fn eddsa_poseidon_verify( let signature_r8 = TEPoint::new(signature_r8_x, signature_r8_y); 
assert(bjj.curve.contains(signature_r8)); // Ensure S < Subgroup Order - assert(lt_bytes32(signature_s, bjj.suborder)); + assert(signature_s.lt(bjj.suborder)); // Calculate the h = H(R, A, msg) let hash: Field = poseidon::bn254::hash_5([signature_r8_x, signature_r8_y, pub_key_x, pub_key_y, message]); // Calculate second part of the right side: right2 = h*8*A diff --git a/noir_stdlib/src/field.nr b/noir_stdlib/src/field.nr index b4cb9b64e3c..fbd76a1e8a2 100644 --- a/noir_stdlib/src/field.nr +++ b/noir_stdlib/src/field.nr @@ -1,3 +1,6 @@ +mod bn254; +use bn254::lt as bn254_lt; + impl Field { pub fn to_le_bits(self: Self, bit_size: u32) -> [u1] { crate::assert_constant(bit_size); @@ -15,6 +18,15 @@ impl Field { #[builtin(to_be_bits)] fn __to_be_bits(_self: Self, _bit_size: u32) -> [u1] {} + #[builtin(apply_range_constraint)] + fn __assert_max_bit_size(_self: Self, _bit_size: u32) {} + + pub fn assert_max_bit_size(self: Self, bit_size: u32) { + crate::assert_constant(bit_size); + assert(bit_size < modulus_num_bits() as u32); + self.__assert_max_bit_size(bit_size); + } + pub fn to_le_bytes(self: Self, byte_size: u32) -> [u8] { self.to_le_radix(256, byte_size) } @@ -65,6 +77,15 @@ impl Field { pub fn sgn0(self) -> u1 { self as u1 } + + pub fn lt(self, another: Field) -> bool { + if crate::compat::is_bn254() { + bn254_lt(self, another) + } else { + lt_fallback(self, another) + } + } + } #[builtin(modulus_num_bits)] @@ -96,3 +117,24 @@ pub fn bytes32_to_field(bytes32: [u8; 32]) -> Field { // Abuse that a % p + b % p = (a + b) % p and that low < p low + high * v } + +fn lt_fallback(x: Field, y: Field) -> bool { + let num_bytes = (modulus_num_bits() as u32 + 7) / 8; + let x_bytes = x.to_le_bytes(num_bytes); + let y_bytes = y.to_le_bytes(num_bytes); + let mut x_is_lt = false; + let mut done = false; + for i in 0..num_bytes { + if (!done) { + let x_byte = x_bytes[num_bytes - 1 - i] as u8; + let y_byte = y_bytes[num_bytes - 1 - i] as u8; + let bytes_match = x_byte == y_byte; + 
if !bytes_match { + x_is_lt = x_byte < y_byte; + done = true; + } + } + } + x_is_lt +} + diff --git a/noir_stdlib/src/field/bn254.nr b/noir_stdlib/src/field/bn254.nr new file mode 100644 index 00000000000..f6e23f8db0c --- /dev/null +++ b/noir_stdlib/src/field/bn254.nr @@ -0,0 +1,92 @@ +global PLO: Field = 53438638232309528389504892708671455233; +global PHI: Field = 64323764613183177041862057485226039389; +global TWO_POW_128: Field = 0x100000000000000000000000000000000; + +unconstrained fn decompose_unsafe(x: Field) -> (Field, Field) { + let x_bytes = x.to_le_bytes(32); + + let mut low: Field = 0; + let mut high: Field = 0; + + let mut offset = 1; + for i in 0..16 { + low += (x_bytes[i] as Field) * offset; + high += (x_bytes[i + 16] as Field) * offset; + offset *= 256; + } + + (low, high) +} + +pub fn decompose(x: Field) -> (Field, Field) { + let (xlo, xhi) = decompose_unsafe(x); + let borrow = lt_unsafe(PLO, xlo, 16); + + xlo.assert_max_bit_size(128); + xhi.assert_max_bit_size(128); + + assert_eq(x, xlo + TWO_POW_128 * xhi); + let rlo = PLO - xlo + (borrow as Field) * TWO_POW_128; + let rhi = PHI - xhi - (borrow as Field); + + rlo.assert_max_bit_size(128); + rhi.assert_max_bit_size(128); + + (xlo, xhi) +} + +unconstrained fn lt_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { + let x_bytes = x.__to_le_radix(256, num_bytes); + let y_bytes = y.__to_le_radix(256, num_bytes); + let mut x_is_lt = false; + let mut done = false; + for i in 0..num_bytes { + if (!done) { + let x_byte = x_bytes[num_bytes - 1 - i]; + let y_byte = y_bytes[num_bytes - 1 - i]; + let bytes_match = x_byte == y_byte; + if !bytes_match { + x_is_lt = x_byte < y_byte; + done = true; + } + } + } + x_is_lt +} + +unconstrained fn lte_unsafe(x: Field, y: Field, num_bytes: u32) -> bool { + lt_unsafe(x, y, num_bytes) | (x == y) +} + +pub fn assert_gt(a: Field, b: Field) { + let (alo, ahi) = decompose(a); + let (blo, bhi) = decompose(b); + + let borrow = lte_unsafe(alo, blo, 16); + + let rlo = alo - blo 
- 1 + (borrow as Field) * TWO_POW_128; + let rhi = ahi - bhi - (borrow as Field); + + rlo.assert_max_bit_size(128); + rhi.assert_max_bit_size(128); +} + +pub fn assert_lt(a: Field, b: Field) { + assert_gt(b, a); +} + +pub fn gt(a: Field, b: Field) -> bool { + if a == b { + false + } else if lt_unsafe(a, b, 32) { + assert_gt(b, a); + false + } else { + assert_gt(a, b); + true + } +} + +pub fn lt(a: Field, b: Field) -> bool { + gt(b, a) +} diff --git a/noir_stdlib/src/hash.nr b/noir_stdlib/src/hash.nr index 157d6518367..5933209d9bc 100644 --- a/noir_stdlib/src/hash.nr +++ b/noir_stdlib/src/hash.nr @@ -7,6 +7,9 @@ pub fn sha256(_input: [u8; N]) -> [u8; 32] {} #[foreign(blake2s)] pub fn blake2s(_input: [u8; N]) -> [u8; 32] {} +#[foreign(blake3)] +pub fn blake3(_input: [u8; N]) -> [u8; 32] {} + struct PedersenPoint { x : Field, y : Field, @@ -16,7 +19,7 @@ pub fn pedersen_commitment(input: [Field; N]) -> PedersenPoint { pedersen_commitment_with_separator(input, 0) } -#[foreign(pedersen)] +#[foreign(pedersen_commitment)] pub fn __pedersen_commitment_with_separator(_input: [Field; N], _separator: u32) -> [Field; 2] {} pub fn pedersen_commitment_with_separator(input: [Field; N], separator: u32) -> PedersenPoint { diff --git a/noir_stdlib/src/lib.nr b/noir_stdlib/src/lib.nr index 70b4681b54d..23a7c71ff45 100644 --- a/noir_stdlib/src/lib.nr +++ b/noir_stdlib/src/lib.nr @@ -19,9 +19,11 @@ mod compat; mod option; mod string; mod test; +mod cmp; mod ops; mod default; mod prelude; +mod uint128; // Oracle calls are required to be wrapped in an unconstrained function // Thus, the only argument to the `println` oracle is expected to always be an ident @@ -37,13 +39,8 @@ unconstrained pub fn println(input: T) { } #[foreign(recursive_aggregation)] -pub fn verify_proof( - _verification_key: [Field], - _proof: [Field], - _public_inputs: [Field], - _key_hash: Field, - _input_aggregation_object: [Field; N] -) -> [Field; N] {} +pub fn verify_proof(_verification_key: [Field], _proof: 
[Field], _public_inputs: [Field], _key_hash: Field) {} + // Asserts that the given value is known at compile-time. // Useful for debugging for-loop bounds. #[builtin(assert_constant)] diff --git a/noir_stdlib/src/ops.nr b/noir_stdlib/src/ops.nr index 23acc2f0e5d..3078ac11296 100644 --- a/noir_stdlib/src/ops.nr +++ b/noir_stdlib/src/ops.nr @@ -63,55 +63,94 @@ impl Div for i16 { fn div(self, other: i16) -> i16 { self / other } } impl Div for i32 { fn div(self, other: i32) -> i32 { self / other } } impl Div for i64 { fn div(self, other: i64) -> i64 { self / other } } -trait Eq { - fn eq(self, other: Self) -> bool; +trait Rem { + fn rem(self, other: Self) -> Self; } -impl Eq for Field { fn eq(self, other: Field) -> bool { self == other } } - -impl Eq for u8 { fn eq(self, other: u8) -> bool { self == other } } -impl Eq for u16 { fn eq(self, other: u16) -> bool { self == other } } -impl Eq for u32 { fn eq(self, other: u32) -> bool { self == other } } -impl Eq for u64 { fn eq(self, other: u64) -> bool { self == other } } - -impl Eq for i8 { fn eq(self, other: i8) -> bool { self == other } } -impl Eq for i16 { fn eq(self, other: i16) -> bool { self == other } } -impl Eq for i32 { fn eq(self, other: i32) -> bool { self == other } } -impl Eq for i64 { fn eq(self, other: i64) -> bool { self == other } } - -impl Eq for () { fn eq(_self: Self, _other: ()) -> bool { true } } -impl Eq for bool { fn eq(self, other: bool) -> bool { self == other } } - -impl Eq for [T; N] where T: Eq { - fn eq(self, other: [T; N]) -> bool { - let mut result = true; - for i in 0 .. 
self.len() { - result &= self[i].eq(other[i]); - } - result - } +impl Rem for u8 { fn rem(self, other: u8) -> u8 { self % other } } +impl Rem for u16 { fn rem(self, other: u16) -> u16 { self % other } } +impl Rem for u32 { fn rem(self, other: u32) -> u32 { self % other } } +impl Rem for u64 { fn rem(self, other: u64) -> u64 { self % other } } + +impl Rem for i8 { fn rem(self, other: i8) -> i8 { self % other } } +impl Rem for i16 { fn rem(self, other: i16) -> i16 { self % other } } +impl Rem for i32 { fn rem(self, other: i32) -> i32 { self % other } } +impl Rem for i64 { fn rem(self, other: i64) -> i64 { self % other } } + +trait BitOr { + fn bitor(self, other: Self) -> Self; } -impl Eq for (A, B) where A: Eq, B: Eq { - fn eq(self, other: (A, B)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) - } +impl BitOr for bool { fn bitor(self, other: bool) -> bool { self | other } } + +impl BitOr for u8 { fn bitor(self, other: u8) -> u8 { self | other } } +impl BitOr for u16 { fn bitor(self, other: u16) -> u16 { self | other } } +impl BitOr for u32 { fn bitor(self, other: u32) -> u32 { self | other } } +impl BitOr for u64 { fn bitor(self, other: u64) -> u64 { self | other } } + +impl BitOr for i8 { fn bitor(self, other: i8) -> i8 { self | other } } +impl BitOr for i16 { fn bitor(self, other: i16) -> i16 { self | other } } +impl BitOr for i32 { fn bitor(self, other: i32) -> i32 { self | other } } +impl BitOr for i64 { fn bitor(self, other: i64) -> i64 { self | other } } + +trait BitAnd { + fn bitand(self, other: Self) -> Self; } -impl Eq for (A, B, C) where A: Eq, B: Eq, C: Eq { - fn eq(self, other: (A, B, C)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) - } +impl BitAnd for bool { fn bitand(self, other: bool) -> bool { self & other } } + +impl BitAnd for u8 { fn bitand(self, other: u8) -> u8 { self & other } } +impl BitAnd for u16 { fn bitand(self, other: u16) -> u16 { self & other } } +impl BitAnd for u32 { fn bitand(self, other: u32) -> u32 
{ self & other } } +impl BitAnd for u64 { fn bitand(self, other: u64) -> u64 { self & other } } + +impl BitAnd for i8 { fn bitand(self, other: i8) -> i8 { self & other } } +impl BitAnd for i16 { fn bitand(self, other: i16) -> i16 { self & other } } +impl BitAnd for i32 { fn bitand(self, other: i32) -> i32 { self & other } } +impl BitAnd for i64 { fn bitand(self, other: i64) -> i64 { self & other } } + +trait BitXor { + fn bitxor(self, other: Self) -> Self; } -impl Eq for (A, B, C, D) where A: Eq, B: Eq, C: Eq, D: Eq { - fn eq(self, other: (A, B, C, D)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) - } +impl BitXor for bool { fn bitxor(self, other: bool) -> bool { self ^ other } } + +impl BitXor for u8 { fn bitxor(self, other: u8) -> u8 { self ^ other } } +impl BitXor for u16 { fn bitxor(self, other: u16) -> u16 { self ^ other } } +impl BitXor for u32 { fn bitxor(self, other: u32) -> u32 { self ^ other } } +impl BitXor for u64 { fn bitxor(self, other: u64) -> u64 { self ^ other } } + +impl BitXor for i8 { fn bitxor(self, other: i8) -> i8 { self ^ other } } +impl BitXor for i16 { fn bitxor(self, other: i16) -> i16 { self ^ other } } +impl BitXor for i32 { fn bitxor(self, other: i32) -> i32 { self ^ other } } +impl BitXor for i64 { fn bitxor(self, other: i64) -> i64 { self ^ other } } + +trait Shl { + fn shl(self, other: Self) -> Self; } -impl Eq for (A, B, C, D, E) where A: Eq, B: Eq, C: Eq, D: Eq, E: Eq { - fn eq(self, other: (A, B, C, D, E)) -> bool { - self.0.eq(other.0) & self.1.eq(other.1) & self.2.eq(other.2) & self.3.eq(other.3) & self.4.eq(other.4) - } +impl Shl for u8 { fn shl(self, other: u8) -> u8 { self << other } } +impl Shl for u16 { fn shl(self, other: u16) -> u16 { self << other } } +impl Shl for u32 { fn shl(self, other: u32) -> u32 { self << other } } +impl Shl for u64 { fn shl(self, other: u64) -> u64 { self << other } } + +// Bit shifting is not currently supported for signed integer types +// impl 
Shl for i8 { fn shl(self, other: i8) -> i8 { self << other } } +// impl Shl for i16 { fn shl(self, other: i16) -> i16 { self << other } } +// impl Shl for i32 { fn shl(self, other: i32) -> i32 { self << other } } +// impl Shl for i64 { fn shl(self, other: i64) -> i64 { self << other } } + +trait Shr { + fn shr(self, other: Self) -> Self; } + +impl Shr for u8 { fn shr(self, other: u8) -> u8 { self >> other } } +impl Shr for u16 { fn shr(self, other: u16) -> u16 { self >> other } } +impl Shr for u32 { fn shr(self, other: u32) -> u32 { self >> other } } +impl Shr for u64 { fn shr(self, other: u64) -> u64 { self >> other } } + +// Bit shifting is not currently supported for signed integer types +// impl Shr for i8 { fn shr(self, other: i8) -> i8 { self >> other } } +// impl Shr for i16 { fn shr(self, other: i16) -> i16 { self >> other } } +// impl Shr for i32 { fn shr(self, other: i32) -> i32 { self >> other } } +// impl Shr for i64 { fn shr(self, other: i64) -> i64 { self >> other } } diff --git a/noir_stdlib/src/prelude.nr b/noir_stdlib/src/prelude.nr index f33a1f7e7f1..b57ff460371 100644 --- a/noir_stdlib/src/prelude.nr +++ b/noir_stdlib/src/prelude.nr @@ -1,3 +1,6 @@ use crate::collections::vec::Vec; use crate::option::Option; use crate::{print, println, assert_constant}; +use crate::uint128::U128; +use crate::cmp::{Eq, Ord}; +use crate::default::Default; diff --git a/noir_stdlib/src/uint128.nr b/noir_stdlib/src/uint128.nr new file mode 100644 index 00000000000..c8c6217de90 --- /dev/null +++ b/noir_stdlib/src/uint128.nr @@ -0,0 +1,305 @@ +use crate::ops::{Add, Sub, Mul, Div, Rem, BitOr, BitAnd, BitXor, Shl, Shr}; +use crate::cmp::{Eq, Ord, Ordering}; + +global pow64 : Field = 18446744073709551616; //2^64; + +struct U128 { + lo: Field, + hi: Field, +} + +impl U128 { + + pub fn from_u64s_le(lo: u64, hi: u64) -> U128 { + // in order to handle multiplication, we need to represent the product of two u64 without overflow + assert(crate::field::modulus_num_bits() as u32 > 
128); + U128 { + lo: lo as Field, + hi: hi as Field, + } + } + + pub fn from_u64s_be(hi: u64, lo: u64) -> U128 { + U128::from_u64s_le(lo,hi) + } + + pub fn from_le_bytes(bytes: [u8; 16]) -> U128 { + let mut lo = 0; + let mut base = 1; + for i in 0..8 { + lo += (bytes[i] as Field)*base; + base *= 256; + } + let mut hi = 0; + base = 1; + for i in 8..16 { + hi += (bytes[i] as Field)*base; + base *= 256; + } + U128 { + lo, + hi, + } + } + + pub fn to_be_bytes(self: Self) -> [u8; 16] { + let lo = self.lo.to_be_bytes(8); + let hi = self.hi.to_be_bytes(8); + let mut bytes = [0;16]; + for i in 0..8 { + bytes[i] = hi[i]; + bytes[i+8] = lo[i]; + } + bytes + } + + pub fn to_le_bytes(self: Self) -> [u8; 16] { + let lo = self.lo.to_le_bytes(8); + let hi = self.hi.to_le_bytes(8); + let mut bytes = [0;16]; + for i in 0..8 { + bytes[i] = lo[i]; + bytes[i+8] = hi[i]; + } + bytes + } + + pub fn from_hex(hex: str) -> U128 { + let N = N as u32; + let bytes = hex.as_bytes(); + // string must starts with "0x" + assert((bytes[0] == 48) & (bytes[1] == 120), "Invalid hexadecimal string"); + assert(N < 35, "Input does not fit into a U128"); + + let mut lo = 0; + let mut hi = 0; + let mut base = 1; + if N <= 18 { + for i in 0..N-2 { + lo += U128::decode_ascii(bytes[N-i-1])*base; + base = base*16; + } + } else { + for i in 0..16 { + lo += U128::decode_ascii(bytes[N-i-1])*base; + base = base*16; + } + base = 1; + for i in 17..N-1 { + hi += U128::decode_ascii(bytes[N-i])*base; + base = base*16; + } + } + U128 { + lo: lo as Field, + hi: hi as Field, + } + } + + fn decode_ascii(ascii: u8) -> Field { + if ascii < 58 { + ascii - 48 + } else { + if ascii < 71 { + ascii - 55 + } else { + ascii - 87 + } + + } as Field + } + + unconstrained fn unconstrained_div(self: Self, b: U128) -> (U128, U128) { + if self < b { + (U128::from_u64s_le(0, 0), self) + } else { + //TODO check if this can overflow? 
+ let (q,r) = self.unconstrained_div(b * U128::from_u64s_le(2,0)); + let q_mul_2 = q * U128::from_u64s_le(2,0); + if r < b { + (q_mul_2, r) + } else { + (q_mul_2 + U128::from_u64s_le(1,0), r - b) + } + + } + } + + pub fn from_integer(i: T) -> U128 { + let f = crate::as_field(i); + // Reject values which would overflow a u128 + f.assert_max_bit_size(128); + let lo = f as u64 as Field; + let hi = (f-lo) / pow64; + U128 { + lo, + hi, + } + } + + pub fn to_integer(self) -> T { + crate::from_field(self.lo+self.hi*pow64) + } + + fn wrapping_mul(self: Self, b: U128) -> U128 { + let low = self.lo*b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo+self.hi)*(b.lo+b.hi) - low + carry + } else { + self.lo*b.hi + self.hi*b.lo + carry + }; + let hi = high as u64 as Field; + U128 { + lo, + hi, + } + } +} + +impl Add for U128 { + fn add(self: Self, b: U128) -> U128 { + let low = self.lo + b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = self.hi + b.hi + carry; + let hi = high as u64 as Field; + assert(hi == high, "attempt to add with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Sub for U128 { + fn sub(self: Self, b: U128) -> U128 { + let low = pow64 + self.lo - b.lo; + let lo = low as u64 as Field; + let borrow = (low == lo) as Field; + let high = self.hi - b.hi - borrow; + let hi = high as u64 as Field; + assert(hi == high, "attempt to subtract with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Mul for U128 { + fn mul(self: Self, b: U128) -> U128 { + assert(self.hi*b.hi == 0, "attempt to multiply with overflow"); + let low = self.lo*b.lo; + let lo = low as u64 as Field; + let carry = (low - lo) / pow64; + let high = if crate::field::modulus_num_bits() as u32 > 196 { + (self.lo+self.hi)*(b.lo+b.hi) - low + carry + } else { + self.lo*b.hi + self.hi*b.lo + carry + }; + let hi = high as u64 as Field; + assert(hi == high, "attempt to multiply 
with overflow"); + U128 { + lo, + hi, + } + } +} + +impl Div for U128 { + fn div(self: Self, b: U128) -> U128 { + let (q,r) = self.unconstrained_div(b); + let a = b * q + r; + assert_eq(self, a); + assert(r < b); + q + } +} + +impl Rem for U128 { + fn rem(self: Self, b: U128) -> U128 { + let (q,r) = self.unconstrained_div(b); + let a = b * q + r; + assert_eq(self, a); + assert(r < b); + r + } +} + +impl Eq for U128 { + fn eq(self: Self, b: U128) -> bool { + (self.lo == b.lo) & (self.hi == b.hi) + } +} + +impl Ord for U128 { + fn cmp(self, other: Self) -> Ordering { + let hi_ordering = (self.hi as u64).cmp((other.hi as u64)); + let lo_ordering = (self.lo as u64).cmp((other.lo as u64)); + + if hi_ordering == Ordering::equal() { + lo_ordering + } else { + hi_ordering + } + } +} + +impl BitOr for U128 { + fn bitor(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) | (other.lo as u64)) as Field, + hi: ((self.hi as u64) | (other.hi as u64))as Field + } + } +} + +impl BitAnd for U128 { + fn bitand(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) & (other.lo as u64)) as Field, + hi: ((self.hi as u64) & (other.hi as u64)) as Field + } + } +} + +impl BitXor for U128 { + fn bitxor(self, other: U128) -> U128 { + U128 { + lo: ((self.lo as u64) ^ (other.lo as u64)) as Field, + hi: ((self.hi as u64) ^ (other.hi as u64)) as Field + } + } +} + +impl Shl for U128 { + fn shl(self, other: U128) -> U128 { + assert(other < U128::from_u64s_le(128,0), "attempt to shift left with overflow"); + let exp_bits = other.lo.to_be_bits(7); + + let mut r: Field = 2; + let mut y: Field = 1; + for i in 1..8 { + y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + r *= r; + } + self.wrapping_mul(U128::from_integer(y)) + } +} + +impl Shr for U128 { + fn shr(self, other: U128) -> U128 { + assert(other < U128::from_u64s_le(128,0), "attempt to shift right with overflow"); + let exp_bits = other.lo.to_be_bits(7); + + let mut r: Field = 2; + let mut y: Field = 
1; + for i in 1..8 { + y = (exp_bits[7-i] as Field) * (r * y) + (1 - exp_bits[7-i] as Field) * y; + r *= r; + } + self / U128::from_integer(y) + } +} diff --git a/package.json b/package.json index 20a5b87cfd8..e70189b5522 100644 --- a/package.json +++ b/package.json @@ -10,7 +10,6 @@ "tooling/noir_codegen", "tooling/noir_js_backend_barretenberg", "acvm-repo/acvm_js", - "release-tests", "docs" ], "scripts": { @@ -21,6 +20,7 @@ "clean:root": "rm -rf ./result ./target ./packages", "clean": "yarn clean:workspaces && yarn clean:root", "lint": "yarn workspaces foreach --verbose run lint", + "spellcheck": "cspell '**/*.{md,rs}' -c ./cspell.json", "install:acvm_js": "yarn workspace @noir-lang/acvm_js run install:from:nix", "install:noir_wasm": "yarn workspace @noir-lang/noir_wasm run install:from:nix", "install:noirc_abi_wasm": "yarn workspace @noir-lang/noirc_abi run install:from:nix", @@ -37,6 +37,7 @@ "@typescript-eslint/eslint-plugin": "^6.7.3", "@typescript-eslint/parser": "^6.7.3", "chai": "^4.3.7", + "cspell": "^8.3.2", "eslint": "^8.50.0", "eslint-plugin-prettier": "^5.0.0", "mocha": "^10.2.0", diff --git a/release-please-config.json b/release-please-config.json index 562de471f0b..e73993ca974 100644 --- a/release-please-config.json +++ b/release-please-config.json @@ -65,7 +65,6 @@ "blackbox_solver/Cargo.toml", "brillig/Cargo.toml", "brillig_vm/Cargo.toml", - "stdlib/Cargo.toml", { "type": "json", "path": "acvm_js/package.json", @@ -81,5 +80,6 @@ }, "plugins": [ "sentence-case" - ] -} + ], + "bootstrap-sha": "690cfc0468de0b9aee53ccfe832c71c16e61e5fc" +} \ No newline at end of file diff --git a/release-tests/package.json b/release-tests/package.json deleted file mode 100644 index 3b73ad18574..00000000000 --- a/release-tests/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "release-tests", - "main": "index.js", - "license": "(MIT OR Apache-2.0)", - "private": true, - "type": "module", - "scripts": { - "test": "uvu test" - }, - "devDependencies": { - "uvu": 
"0.5.6", - "zx": "7.1.1" - } -} diff --git a/release-tests/test/6_array.test.js b/release-tests/test/6_array.test.js deleted file mode 100644 index 43d4a389264..00000000000 --- a/release-tests/test/6_array.test.js +++ /dev/null @@ -1,49 +0,0 @@ -import { suite } from "uvu"; -import { cd } from "zx"; -import { NARGO_BIN } from "./utils/nargo.js"; -import "./utils/zx.js"; - -const test = suite("nargo"); - -// Helps detect unresolved ProcessPromise. -let promiseResolved = false; -process.on("exit", () => { - if (!promiseResolved) { - console.error("Error: ProcessPromise never resolved."); - process.exitCode = 1; - } -}); - -test("promise resolved", async () => { - await $`echo PromiseHelper`; - promiseResolved = true; -}); - -test("nargo builds ../test_programs/execution_success/6_array sucessfully", async () => { - await within(async () => { - cd("../test_programs/execution_success/6_array"); - const command = `${NARGO_BIN} check`; - - await $`${command}`.nothrow(); - }); -}); - -test("nargo creates proof ../test_programs/execution_success/6_array sucessfully", async () => { - await within(async () => { - cd("../test_programs/execution_success/6_array"); - const command = `${NARGO_BIN} prove 6_array`; - - await $`${command}`.nothrow(); - }); -}); - -test("nargo verifies proof ../test_programs/execution_success/6_array sucessfully", async () => { - await within(async () => { - cd("../test_programs/execution_success/6_array"); - const command = `${NARGO_BIN} verify 6_array`; - - await $`${command}`.nothrow(); - }); -}); - -test.run(); diff --git a/release-tests/test/utils/nargo.js b/release-tests/test/utils/nargo.js deleted file mode 100644 index 537cdfc8be5..00000000000 --- a/release-tests/test/utils/nargo.js +++ /dev/null @@ -1,3 +0,0 @@ -import { default as path } from "node:path"; - -export const NARGO_BIN = process.env.NARGO_BIN ? 
path.resolve(process.env.NARGO_BIN) : "nargo"; diff --git a/release-tests/test/utils/zx.js b/release-tests/test/utils/zx.js deleted file mode 100644 index a8ab500aec0..00000000000 --- a/release-tests/test/utils/zx.js +++ /dev/null @@ -1,11 +0,0 @@ -import "zx/globals"; - -// We perform any common setup for zx here to avoid repetition across test files. - -if (process.platform == "win32") { - $.shell = "powershell"; -} - -$.quote = (arg) => arg; - -$.verbose = true; diff --git a/release-tests/test/version.test.js b/release-tests/test/version.test.js deleted file mode 100644 index 7a70639d83e..00000000000 --- a/release-tests/test/version.test.js +++ /dev/null @@ -1,35 +0,0 @@ -import { suite } from "uvu"; -import * as assert from "uvu/assert"; -import { NARGO_BIN } from "./utils/nargo.js"; -import "./utils/zx.js"; - -const test = suite("nargo"); - -// Helps detect unresolved ProcessPromise. -let promiseResolved = false; -process.on("exit", () => { - if (!promiseResolved) { - console.error("Error: ProcessPromise never resolved."); - process.exitCode = 1; - } -}); - -test("promise resolved", async () => { - await $`echo PromiseHelper`; - promiseResolved = true; -}); - -test("prints version", async () => { - const processOutput = (await $`${NARGO_BIN} --version`).toString(); - - // Regex to match the "nargo version" part of the output - assert.match(processOutput, /nargo version = \d{1,2}\.\d{1,2}\.\d{1,2}/); -}); - - -test("reports a clean commit", async () => { - const processOutput = (await $`${NARGO_BIN} --version`).toString(); - assert.not.match(processOutput, /is dirty: true/) -}); - -test.run(); diff --git a/scripts/bootstrap_native.sh b/scripts/bootstrap_native.sh index 481c76a263e..3e0e2ed853a 100755 --- a/scripts/bootstrap_native.sh +++ b/scripts/bootstrap_native.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. @@ -13,4 +13,8 @@ else fi # Build native. 
-cargo build --release +if [ -n "${DEBUG:-}" ]; then + cargo build +else + cargo build --release +fi diff --git a/scripts/bootstrap_packages.sh b/scripts/bootstrap_packages.sh index 2f293d93faf..18c34b9cfb7 100755 --- a/scripts/bootstrap_packages.sh +++ b/scripts/bootstrap_packages.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. @@ -14,7 +14,7 @@ else export GIT_COMMIT=$(git rev-parse --verify HEAD) fi -yarn +yarn --immutable yarn build # We create a folder called packages, that contains each package as it would be published to npm, named correctly. diff --git a/scripts/install_wasm-bindgen.sh b/scripts/install_wasm-bindgen.sh index 5e9f9127506..c6e85bac50b 100755 --- a/scripts/install_wasm-bindgen.sh +++ b/scripts/install_wasm-bindgen.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -eu cd $(dirname "$0")/.. diff --git a/scripts/nargo_compile_noir_codegen_assert_lt.sh b/scripts/nargo_compile_noir_codegen_assert_lt.sh deleted file mode 100755 index 858a16cf517..00000000000 --- a/scripts/nargo_compile_noir_codegen_assert_lt.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -cd ./tooling/noir_codegen/test/assert_lt -nargo compile \ No newline at end of file diff --git a/scripts/nargo_compile_wasm_fixtures.sh b/scripts/nargo_compile_wasm_fixtures.sh index c047888e405..95bb698c8a2 100755 --- a/scripts/nargo_compile_wasm_fixtures.sh +++ b/scripts/nargo_compile_wasm_fixtures.sh @@ -1,8 +1,7 @@ #!/bin/bash -cd ./compiler/wasm/fixtures -for dir in $(ls -d */); do - pushd $dir/noir-script - nargo compile - popd -done +fixtures_dir="./compiler/wasm/test/fixtures" + +nargo compile --program-dir=$fixtures_dir/noir-contract +nargo compile --program-dir=$fixtures_dir/simple +nargo compile --program-dir=$fixtures_dir/with-deps diff --git a/scripts/test_js_packages.sh b/scripts/test_js_packages.sh index a5ec5b92a70..cf4fd81326d 100755 --- a/scripts/test_js_packages.sh +++ b/scripts/test_js_packages.sh @@ -17,10 +17,10 @@ fi cargo build 
--release export PATH="${PATH}:/usr/src/noir/target/release/" -yarn +yarn --immutable yarn build npx playwright install npx playwright install-deps ./scripts/test.sh -yarn test \ No newline at end of file +yarn test diff --git a/test_programs/.gitignore b/test_programs/.gitignore index 01a3426160c..a229df6197f 100644 --- a/test_programs/.gitignore +++ b/test_programs/.gitignore @@ -1 +1,2 @@ acir_artifacts +execution_success/**/crs \ No newline at end of file diff --git a/test_programs/execution_success/brillig_nested_slices/Nargo.toml b/test_programs/compile_failure/brillig_nested_slices/Nargo.toml similarity index 100% rename from test_programs/execution_success/brillig_nested_slices/Nargo.toml rename to test_programs/compile_failure/brillig_nested_slices/Nargo.toml diff --git a/test_programs/execution_success/brillig_nested_slices/Prover.toml b/test_programs/compile_failure/brillig_nested_slices/Prover.toml similarity index 100% rename from test_programs/execution_success/brillig_nested_slices/Prover.toml rename to test_programs/compile_failure/brillig_nested_slices/Prover.toml diff --git a/test_programs/execution_success/brillig_nested_slices/src/main.nr b/test_programs/compile_failure/brillig_nested_slices/src/main.nr similarity index 100% rename from test_programs/execution_success/brillig_nested_slices/src/main.nr rename to test_programs/compile_failure/brillig_nested_slices/src/main.nr diff --git a/test_programs/compile_failure/cyclic_dep/Nargo.toml b/test_programs/compile_failure/cyclic_dep/Nargo.toml new file mode 100644 index 00000000000..6a5a9b7db73 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "cyclic_dep" +type = "bin" +authors = [""] + +[dependencies] +dep1 = { path= "./dep1"} \ No newline at end of file diff --git a/test_programs/execution_success/regression_3635/Prover.toml b/test_programs/compile_failure/cyclic_dep/Prover.toml similarity index 100% rename from 
test_programs/execution_success/regression_3635/Prover.toml rename to test_programs/compile_failure/cyclic_dep/Prover.toml diff --git a/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml b/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml new file mode 100644 index 00000000000..4782bbd5cda --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep1/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "dep1" +type = "lib" +authors = [""] + +[dependencies] +dep1 = { path= "../dep2"} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr b/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr new file mode 100644 index 00000000000..02b68c56bd2 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep1/src/lib.nr @@ -0,0 +1,3 @@ +fn bar() { + +} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml b/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml new file mode 100644 index 00000000000..5e2a0f304b0 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep2/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "dep2" +type = "lib" +authors = [""] + +[dependencies] +dep1 = { path= "../dep1"} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr b/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr new file mode 100644 index 00000000000..298a0f3c7ca --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/dep2/src/lib.nr @@ -0,0 +1,3 @@ +fn foo() { + +} \ No newline at end of file diff --git a/test_programs/compile_failure/cyclic_dep/src/main.nr b/test_programs/compile_failure/cyclic_dep/src/main.nr new file mode 100644 index 00000000000..c55ca748334 --- /dev/null +++ b/test_programs/compile_failure/cyclic_dep/src/main.nr @@ -0,0 +1,7 @@ +use dep1::foo; +use dep2::bar; + +fn main() { + dep1::foo(); + dep2::bar(); +} diff --git a/test_programs/compile_failure/multiple_contracts/Nargo.toml 
b/test_programs/compile_failure/multiple_contracts/Nargo.toml deleted file mode 100644 index d6e4e632f95..00000000000 --- a/test_programs/compile_failure/multiple_contracts/Nargo.toml +++ /dev/null @@ -1,5 +0,0 @@ -[package] -name = "multiple_contracts" -type = "contract" -authors = [""] -[dependencies] diff --git a/test_programs/compile_failure/multiple_contracts/src/main.nr b/test_programs/compile_failure/multiple_contracts/src/main.nr deleted file mode 100644 index a6c49d75378..00000000000 --- a/test_programs/compile_failure/multiple_contracts/src/main.nr +++ /dev/null @@ -1,3 +0,0 @@ -contract Foo {} - -contract Bar {} diff --git a/test_programs/compile_failure/nested_slice_declared_type/Nargo.toml b/test_programs/compile_failure/nested_slice_declared_type/Nargo.toml new file mode 100644 index 00000000000..b0ba05135ed --- /dev/null +++ b/test_programs/compile_failure/nested_slice_declared_type/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "nested_slice_declared_type" +type = "bin" +authors = [""] +compiler_version = ">=0.22.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/nested_slice_declared_type/src/main.nr b/test_programs/compile_failure/nested_slice_declared_type/src/main.nr new file mode 100644 index 00000000000..417f9a092e0 --- /dev/null +++ b/test_programs/compile_failure/nested_slice_declared_type/src/main.nr @@ -0,0 +1,6 @@ +fn main(x: Field, y: pub Field) { + assert(x != y); + + let slice: [[Field]] = []; + assert(slice.len() != 10); +} diff --git a/test_programs/compile_failure/nested_slice_literal/Nargo.toml b/test_programs/compile_failure/nested_slice_literal/Nargo.toml new file mode 100644 index 00000000000..db919955de5 --- /dev/null +++ b/test_programs/compile_failure/nested_slice_literal/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "nested_slice_literal" +type = "bin" +authors = [""] +compiler_version = ">=0.22.0" + +[dependencies] \ No newline at end of file diff --git 
a/test_programs/compile_failure/nested_slice_literal/src/main.nr b/test_programs/compile_failure/nested_slice_literal/src/main.nr new file mode 100644 index 00000000000..3140818d0b2 --- /dev/null +++ b/test_programs/compile_failure/nested_slice_literal/src/main.nr @@ -0,0 +1,23 @@ +struct FooParent { + parent_arr: [Field; 3], + foos: [Foo], +} + +struct Bar { + inner: [Field; 3], +} + +struct Foo { + a: Field, + b: T, + bar: Bar, +} + +fn main(x: Field, y: pub Field) { + assert(x != y); + + let foo = Foo { a: 7, b: [8, 9, 22].as_slice(), bar: Bar { inner: [106, 107, 108] } }; + let mut slice = [foo, foo]; + slice = slice.push_back(foo); + assert(slice.len() == 3); +} diff --git a/test_programs/compile_failure/nested_slice_struct/Nargo.toml b/test_programs/compile_failure/nested_slice_struct/Nargo.toml new file mode 100644 index 00000000000..a37fe6c3390 --- /dev/null +++ b/test_programs/compile_failure/nested_slice_struct/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "nested_slice_struct" +type = "bin" +authors = [""] +compiler_version = ">=0.22.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/compile_failure/nested_slice_struct/src/main.nr b/test_programs/compile_failure/nested_slice_struct/src/main.nr new file mode 100644 index 00000000000..9fed4cfc299 --- /dev/null +++ b/test_programs/compile_failure/nested_slice_struct/src/main.nr @@ -0,0 +1,18 @@ +struct FooParent { + parent_arr: [Field; 3], + foos: [Foo], +} + +struct Bar { + inner: [Field; 3], +} + +struct Foo { + a: Field, + b: [Field], + bar: Bar, +} + +fn main(x: Field, y: pub Field) { + assert(x != y); +} diff --git a/test_programs/execution_success/slice_struct_field/Nargo.toml b/test_programs/compile_success_empty/field_comparisons/Nargo.toml similarity index 65% rename from test_programs/execution_success/slice_struct_field/Nargo.toml rename to test_programs/compile_success_empty/field_comparisons/Nargo.toml index 9530ebf9271..e8b06655c58 100644 --- 
a/test_programs/execution_success/slice_struct_field/Nargo.toml +++ b/test_programs/compile_success_empty/field_comparisons/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "slice_struct_field" +name = "field_comparisons" type = "bin" authors = [""] diff --git a/test_programs/compile_success_empty/field_comparisons/Prover.toml b/test_programs/compile_success_empty/field_comparisons/Prover.toml new file mode 100644 index 00000000000..8b137891791 --- /dev/null +++ b/test_programs/compile_success_empty/field_comparisons/Prover.toml @@ -0,0 +1 @@ + diff --git a/test_programs/compile_success_empty/field_comparisons/src/main.nr b/test_programs/compile_success_empty/field_comparisons/src/main.nr new file mode 100644 index 00000000000..48cca6c89fc --- /dev/null +++ b/test_programs/compile_success_empty/field_comparisons/src/main.nr @@ -0,0 +1,86 @@ +use dep::std::field::bn254::{PLO, PHI, TWO_POW_128, decompose, decompose_unsafe, lt_unsafe, lte_unsafe, assert_gt, gt}; + +fn check_plo_phi() { + assert_eq(PLO + PHI * TWO_POW_128, 0); + let p_bytes = dep::std::field::modulus_le_bytes(); + let mut p_low: Field = 0; + let mut p_high: Field = 0; + + let mut offset = 1; + for i in 0..16 { + p_low += (p_bytes[i] as Field) * offset; + p_high += (p_bytes[i + 16] as Field) * offset; + offset *= 256; + } + assert_eq(p_low, PLO); + assert_eq(p_high, PHI); +} + +fn check_decompose_unsafe() { + assert_eq(decompose_unsafe(TWO_POW_128), (0, 1)); + assert_eq(decompose_unsafe(TWO_POW_128 + 0x1234567890), (0x1234567890, 1)); + assert_eq(decompose_unsafe(0x1234567890), (0x1234567890, 0)); +} + +fn check_decompose() { + assert_eq(decompose(TWO_POW_128), (0, 1)); + assert_eq(decompose(TWO_POW_128 + 0x1234567890), (0x1234567890, 1)); + assert_eq(decompose(0x1234567890), (0x1234567890, 0)); +} + +fn check_lt_unsafe() { + assert(lt_unsafe(0, 1, 16)); + assert(lt_unsafe(0, 0x100, 16)); + assert(lt_unsafe(0x100, TWO_POW_128 - 1, 16)); + assert(!lt_unsafe(0, TWO_POW_128, 16)); +} + +fn check_lte_unsafe() { 
+ assert(lte_unsafe(0, 1, 16)); + assert(lte_unsafe(0, 0x100, 16)); + assert(lte_unsafe(0x100, TWO_POW_128 - 1, 16)); + assert(!lte_unsafe(0, TWO_POW_128, 16)); + + assert(lte_unsafe(0, 0, 16)); + assert(lte_unsafe(0x100, 0x100, 16)); + assert(lte_unsafe(TWO_POW_128 - 1, TWO_POW_128 - 1, 16)); + assert(lte_unsafe(TWO_POW_128, TWO_POW_128, 16)); +} + +fn check_assert_gt() { + assert_gt(1, 0); + assert_gt(0x100, 0); + assert_gt((0 - 1), (0 - 2)); + assert_gt(TWO_POW_128, 0); + assert_gt(0 - 1, 0); +} + +fn check_gt() { + assert(gt(1, 0)); + assert(gt(0x100, 0)); + assert(gt((0 - 1), (0 - 2))); + assert(gt(TWO_POW_128, 0)); + assert(!gt(0, 0)); + assert(!gt(0, 0x100)); + assert(gt(0 - 1, 0 - 2)); + assert(!gt(0 - 2, 0 - 1)); +} + +fn checks() { + check_plo_phi(); + check_decompose_unsafe(); + check_decompose(); + check_lt_unsafe(); + check_lte_unsafe(); + check_assert_gt(); + check_gt(); +} + +unconstrained fn checks_in_brillig() { + checks(); +} + +fn main() { + checks(); + checks_in_brillig(); +} diff --git a/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr b/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr index de3078be8ba..780512f04dc 100644 --- a/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr +++ b/test_programs/compile_success_empty/impl_with_where_clause/src/main.nr @@ -1,27 +1,27 @@ fn main() { let array: [Field; 3] = [1, 2, 3]; - assert(array.eq(array)); + assert(array.my_eq(array)); // Ensure this still works if we have to infer the type of the integer literals let array = [1, 2, 3]; - assert(array.eq(array)); + assert(array.my_eq(array)); } -trait Eq { - fn eq(self, other: Self) -> bool; +trait MyEq { + fn my_eq(self, other: Self) -> bool; } -impl Eq for [T; 3] where T: Eq { - fn eq(self, other: Self) -> bool { +impl MyEq for [T; 3] where T: MyEq { + fn my_eq(self, other: Self) -> bool { let mut ret = true; for i in 0 .. 
self.len() { - ret &= self[i].eq(other[i]); + ret &= self[i].my_eq(other[i]); } ret } } -impl Eq for Field { - fn eq(self, other: Field) -> bool { +impl MyEq for Field { + fn my_eq(self, other: Field) -> bool { self == other } } diff --git a/test_programs/compile_success_empty/reexports/Nargo.toml b/test_programs/compile_success_empty/reexports/Nargo.toml new file mode 100644 index 00000000000..4a87f28fd89 --- /dev/null +++ b/test_programs/compile_success_empty/reexports/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "reexports" +type = "bin" +authors = [""] + +[dependencies] +reexporting_lib = { path = "../../test_libraries/reexporting_lib" } diff --git a/test_programs/compile_success_empty/reexports/src/main.nr b/test_programs/compile_success_empty/reexports/src/main.nr new file mode 100644 index 00000000000..bb94b21b221 --- /dev/null +++ b/test_programs/compile_success_empty/reexports/src/main.nr @@ -0,0 +1,8 @@ +use dep::reexporting_lib::{FooStruct, MyStruct, lib}; + +fn main() { + let x: FooStruct = MyStruct { + inner: 0 + }; + assert(lib::is_struct_zero(x)); +} diff --git a/test_programs/execution_success/regression_3635/Nargo.toml b/test_programs/compile_success_empty/regression_3635/Nargo.toml similarity index 100% rename from test_programs/execution_success/regression_3635/Nargo.toml rename to test_programs/compile_success_empty/regression_3635/Nargo.toml diff --git a/test_programs/execution_success/regression_3635/src/main.nr b/test_programs/compile_success_empty/regression_3635/src/main.nr similarity index 100% rename from test_programs/execution_success/regression_3635/src/main.nr rename to test_programs/compile_success_empty/regression_3635/src/main.nr diff --git a/test_programs/compile_success_empty/regression_3964/Nargo.toml b/test_programs/compile_success_empty/regression_3964/Nargo.toml new file mode 100644 index 00000000000..a3fd040bcc2 --- /dev/null +++ b/test_programs/compile_success_empty/regression_3964/Nargo.toml @@ -0,0 +1,7 @@ +[package] 
+name = "regression_3964" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/test_programs/compile_success_empty/regression_3964/src/main.nr b/test_programs/compile_success_empty/regression_3964/src/main.nr new file mode 100644 index 00000000000..0600a4281a0 --- /dev/null +++ b/test_programs/compile_success_empty/regression_3964/src/main.nr @@ -0,0 +1,5 @@ +fn main() { + let one: u8 = 1; + let p = ((one, 2), (3, 4)); + assert(p == p); +} diff --git a/test_programs/compile_success_empty/trait_default_implementation/src/main.nr b/test_programs/compile_success_empty/trait_default_implementation/src/main.nr index e1f29ce3f48..2f5bff8c40c 100644 --- a/test_programs/compile_success_empty/trait_default_implementation/src/main.nr +++ b/test_programs/compile_success_empty/trait_default_implementation/src/main.nr @@ -1,12 +1,11 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; fn method2(x: Field) -> Field { - x + x } - } struct Foo { @@ -14,8 +13,8 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } } diff --git a/test_programs/compile_success_empty/trait_generics/Nargo.toml b/test_programs/compile_success_empty/trait_generics/Nargo.toml index c1b5d0aaa6c..7fdd5975541 100644 --- a/test_programs/compile_success_empty/trait_generics/Nargo.toml +++ b/test_programs/compile_success_empty/trait_generics/Nargo.toml @@ -2,5 +2,6 @@ name = "trait_generics" type = "bin" authors = [""] +compiler_version = ">=0.22.0" [dependencies] diff --git a/test_programs/compile_success_empty/trait_generics/src/main.nr b/test_programs/compile_success_empty/trait_generics/src/main.nr index bb6d6e74726..9a3c54c3fa1 100644 --- a/test_programs/compile_success_empty/trait_generics/src/main.nr +++ 
b/test_programs/compile_success_empty/trait_generics/src/main.nr @@ -1,59 +1,57 @@ -struct Empty {} -trait Foo { - fn foo(self) -> u32; -} +fn main() { + let xs: [Field; 1] = [3]; + let ys: [u32; 1] = [3]; + foo(xs, ys); -impl Foo for Empty { - fn foo(_self: Self) -> u32 { 32 } + assert_eq(15, sum(Data { a: 5, b: 10 })); + assert_eq(15, sum_static(Data { a: 5, b: 10 })); } -impl Foo for Empty { - fn foo(_self: Self) -> u32 { 64 } +fn foo(x: T, u: U) where T: Into, U: Eq { + assert(x.into() == u); } -fn main() { - let x: Empty = Empty {}; - let y: Empty = Empty {}; - let z = Empty {}; - - assert(x.foo() == 32); - assert(y.foo() == 64); - // Types matching multiple impls will currently choose - // the first matching one instead of erroring - assert(z.foo() == 32); - - call_impl_with_generic_struct(); - call_impl_with_generic_function(); +trait Into { + fn into(self) -> T; } -// Ensure we can call a generic impl -fn call_impl_with_generic_struct() { - let x: u8 = 7; - let y: i8 = 8; - let s2_u8 = S2 { x }; - let s2_i8 = S2 { x: y }; - assert(s2_u8.t2().x == 7); - assert(s2_i8.t2().x == 8); + +impl Into<[U; N]> for [T; N] where T: Into { + fn into(self) -> [U; N] { + self.map(|x: T| x.into()) + } } -trait T2 { - fn t2(self) -> Self; +impl Into for Field { + fn into(self) -> u32 { + self as u32 + } } -struct S2 { x: T } +/// Serialize example + +trait Serializable { + fn serialize(self) -> [Field; N]; +} -impl T2 for S2 { - fn t2(self) -> Self { self } +struct Data { + a: Field, + b: Field, } -fn call_impl_with_generic_function() { - assert(3.t3(7) == 7); +impl Serializable<2> for Data { + fn serialize(self) -> [Field; 2] { + [self.a, self.b] + } } -trait T3 { - fn t3(self, x: T) -> T; +fn sum(data: T) -> Field where T: Serializable { + let serialized = data.serialize(); + serialized.fold(0, |acc, elem| acc + elem) } -impl T3 for u32 { - fn t3(self, y: U) -> U { y } +// Test static trait method syntax +fn sum_static(data: T) -> Field where T: Serializable { + let 
serialized = Serializable::serialize(data); + serialized.fold(0, |acc, elem| acc + elem) } diff --git a/test_programs/compile_success_empty/trait_impl_generics/Nargo.toml b/test_programs/compile_success_empty/trait_impl_generics/Nargo.toml new file mode 100644 index 00000000000..b10b5dab6aa --- /dev/null +++ b/test_programs/compile_success_empty/trait_impl_generics/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "trait_impl_generics" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/compile_success_empty/trait_impl_generics/src/main.nr b/test_programs/compile_success_empty/trait_impl_generics/src/main.nr new file mode 100644 index 00000000000..c46c41cbdd7 --- /dev/null +++ b/test_programs/compile_success_empty/trait_impl_generics/src/main.nr @@ -0,0 +1,59 @@ +struct Empty {} + +trait Foo { + fn foo(self) -> u32; +} + +impl Foo for Empty { + fn foo(_self: Self) -> u32 { 32 } +} + +impl Foo for Empty { + fn foo(_self: Self) -> u32 { 64 } +} + +fn main() { + let x: Empty = Empty {}; + let y: Empty = Empty {}; + let z = Empty {}; + + assert(x.foo() == 32); + assert(y.foo() == 64); + // Types matching multiple impls will currently choose + // the first matching one instead of erroring + assert(z.foo() == 32); + + call_impl_with_generic_struct(); + call_impl_with_generic_function(); +} +// Ensure we can call a generic impl +fn call_impl_with_generic_struct() { + let x: u8 = 7; + let y: i8 = 8; + let s2_u8 = S2 { x }; + let s2_i8 = S2 { x: y }; + assert(s2_u8.t2().x == 7); + assert(s2_i8.t2().x == 8); +} + +trait T2 { + fn t2(self) -> Self; +} + +struct S2 { x: T } + +impl T2 for S2 { + fn t2(self) -> Self { self } +} + +fn call_impl_with_generic_function() { + assert(3.t3(7) == 7); +} + +trait T3 { + fn t3(self, x: T) -> T; +} + +impl T3 for u32 { + fn t3(_self: Self, y: U) -> U { y } +} diff --git a/test_programs/compile_success_empty/trait_override_implementation/src/main.nr 
b/test_programs/compile_success_empty/trait_override_implementation/src/main.nr index a385efc63fd..85528291870 100644 --- a/test_programs/compile_success_empty/trait_override_implementation/src/main.nr +++ b/test_programs/compile_success_empty/trait_override_implementation/src/main.nr @@ -1,7 +1,7 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; fn method2(x: Field) -> Field { x @@ -13,8 +13,8 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: x, array: [x,y] } } @@ -25,18 +25,18 @@ impl Default for Foo { trait F { fn f1(self) -> Field; - fn f2(self) -> Field { 2 } - fn f3(self) -> Field { 3 } - fn f4(self) -> Field { 4 } - fn f5(self) -> Field { 5 } + fn f2(_self: Self) -> Field { 2 } + fn f3(_self: Self) -> Field { 3 } + fn f4(_self: Self) -> Field { 4 } + fn f5(_self: Self) -> Field { 5 } } struct Bar {} impl F for Bar { - fn f5(self) -> Field { 50 } - fn f1(self) -> Field { 10 } - fn f3(self) -> Field { 30 } + fn f5(_self: Self) -> Field { 50 } + fn f1(_self: Self) -> Field { 10 } + fn f3(_self: Self) -> Field { 30 } } // Impls on mutable references are temporarily disabled // impl F for &mut Bar { diff --git a/test_programs/compile_success_empty/traits/src/main.nr b/test_programs/compile_success_empty/traits/src/main.nr index 784ff01a883..ed804559fed 100644 --- a/test_programs/compile_success_empty/traits/src/main.nr +++ b/test_programs/compile_success_empty/traits/src/main.nr @@ -1,7 +1,7 @@ use dep::std; -trait Default { - fn default(x: Field, y: Field) -> Self; +trait MyDefault { + fn my_default(x: Field, y: Field) -> Self; } struct Foo { @@ -9,13 +9,13 @@ struct Foo { array: [Field; 2], } -impl Default for Foo { - fn default(x: Field,y: Field) -> Self { +impl MyDefault for Foo { + fn my_default(x: Field,y: Field) -> Self { Self { bar: 
x, array: [x,y] } } } fn main(x: Field, y: Field) { - let first = Foo::default(x, y); + let first = Foo::my_default(x, y); assert(first.bar == x); } diff --git a/test_programs/execution_success/bit_and/Prover.toml b/test_programs/execution_success/bit_and/Prover.toml index 40ce2b0bc27..34a5b63e5b1 100644 --- a/test_programs/execution_success/bit_and/Prover.toml +++ b/test_programs/execution_success/bit_and/Prover.toml @@ -1,2 +1,4 @@ x = "0x00" y = "0x10" +a = "0x00" +b = "0x10" diff --git a/test_programs/execution_success/bit_and/src/main.nr b/test_programs/execution_success/bit_and/src/main.nr index 0bc1d9a49bd..5a0aa17e3ed 100644 --- a/test_programs/execution_success/bit_and/src/main.nr +++ b/test_programs/execution_success/bit_and/src/main.nr @@ -1,6 +1,6 @@ // You can only do bit operations with integers. // (Kobi/Daira/Circom/#37) https://github.com/iden3/circom/issues/37 -fn main(x: Field, y: Field) { +fn main(x: Field, y: Field, a: Field, b: Field) { let x_as_u8 = x as u8; let y_as_u8 = y as u8; @@ -9,8 +9,8 @@ fn main(x: Field, y: Field) { let flag = (x == 0) & (y == 16); assert(flag); //bitwise and with odd bits: - let x_as_u11 = x as u11; - let y_as_u11 = y as u11; - assert((x_as_u11 & y_as_u11) == x_as_u11); + let a_as_u8 = a as u8; + let b_as_u8 = b as u8; + assert((a_as_u8 & b_as_u8) == a_as_u8); } diff --git a/test_programs/execution_success/bit_shifts_runtime/src/main.nr b/test_programs/execution_success/bit_shifts_runtime/src/main.nr index a2c873a7e7f..33d68765598 100644 --- a/test_programs/execution_success/bit_shifts_runtime/src/main.nr +++ b/test_programs/execution_success/bit_shifts_runtime/src/main.nr @@ -5,4 +5,15 @@ fn main(x: u64, y: u64) { // runtime shifts on runtime values assert(x << y == 128); assert(x >> y == 32); + + // Bit-shift with signed integers + let mut a :i8 = y as i8; + let mut b: i8 = x as i8; + assert(b << 1 == -128); + assert(b >> 2 == 16); + assert(b >> a == 32); + a = -a; + assert(a << 7 == -128); + assert(a << -a == 
-2); + } diff --git a/test_programs/execution_success/blake3/Nargo.toml b/test_programs/execution_success/blake3/Nargo.toml new file mode 100644 index 00000000000..29f6ad5f11c --- /dev/null +++ b/test_programs/execution_success/blake3/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "blake3" +type = "bin" +authors = [""] +compiler_version = ">=0.22.0" + +[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/blake3/Prover.toml b/test_programs/execution_success/blake3/Prover.toml new file mode 100644 index 00000000000..c807701479b --- /dev/null +++ b/test_programs/execution_success/blake3/Prover.toml @@ -0,0 +1,37 @@ +# hello as bytes +# https://connor4312.github.io/blake3/index.html +x = [104, 101, 108, 108, 111] +result = [ + 0xea, + 0x8f, + 0x16, + 0x3d, + 0xb3, + 0x86, + 0x82, + 0x92, + 0x5e, + 0x44, + 0x91, + 0xc5, + 0xe5, + 0x8d, + 0x4b, + 0xb3, + 0x50, + 0x6e, + 0xf8, + 0xc1, + 0x4e, + 0xb7, + 0x8a, + 0x86, + 0xe9, + 0x08, + 0xc5, + 0x62, + 0x4a, + 0x67, + 0x20, + 0x0f, +] diff --git a/test_programs/execution_success/blake3/src/main.nr b/test_programs/execution_success/blake3/src/main.nr new file mode 100644 index 00000000000..3bfea6c5f95 --- /dev/null +++ b/test_programs/execution_success/blake3/src/main.nr @@ -0,0 +1,6 @@ +use dep::std; + +fn main(x: [u8; 5], result: [u8; 32]) { + let digest = std::hash::blake3(x); + assert(digest == result); +} diff --git a/test_programs/execution_success/brillig_ecdsa/Nargo.toml b/test_programs/execution_success/brillig_array_eq/Nargo.toml similarity index 67% rename from test_programs/execution_success/brillig_ecdsa/Nargo.toml rename to test_programs/execution_success/brillig_array_eq/Nargo.toml index 972dd9ce93b..62ce392f96b 100644 --- a/test_programs/execution_success/brillig_ecdsa/Nargo.toml +++ b/test_programs/execution_success/brillig_array_eq/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "brillig_ecdsa" +name = "brillig_array_eq" type = "bin" authors = [""] diff --git 
a/test_programs/execution_success/brillig_array_eq/Prover.toml b/test_programs/execution_success/brillig_array_eq/Prover.toml new file mode 100644 index 00000000000..ecfed7de213 --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/Prover.toml @@ -0,0 +1,2 @@ +a = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] +b = [77,75,108,209,54,16,50,202,155,210,174,185,217,0,170,77,69,217,234,216,10,201,66,51,116,196,81,167,37,77,7,102] diff --git a/test_programs/execution_success/brillig_array_eq/src/main.nr b/test_programs/execution_success/brillig_array_eq/src/main.nr new file mode 100644 index 00000000000..90f631dbed8 --- /dev/null +++ b/test_programs/execution_success/brillig_array_eq/src/main.nr @@ -0,0 +1,4 @@ +// Simple example of checking where two arrays are equal +unconstrained fn main(a: [Field; 32], b: [Field; 32]) { + assert(a == b); +} diff --git a/test_programs/execution_success/brillig_blake3/Nargo.toml b/test_programs/execution_success/brillig_blake3/Nargo.toml new file mode 100644 index 00000000000..879476dbdcf --- /dev/null +++ b/test_programs/execution_success/brillig_blake3/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "brillig_blake3" +type = "bin" +authors = [""] +compiler_version = ">=0.22.0" + +[dependencies] diff --git a/test_programs/execution_success/brillig_blake3/Prover.toml b/test_programs/execution_success/brillig_blake3/Prover.toml new file mode 100644 index 00000000000..c807701479b --- /dev/null +++ b/test_programs/execution_success/brillig_blake3/Prover.toml @@ -0,0 +1,37 @@ +# hello as bytes +# https://connor4312.github.io/blake3/index.html +x = [104, 101, 108, 108, 111] +result = [ + 0xea, + 0x8f, + 0x16, + 0x3d, + 0xb3, + 0x86, + 0x82, + 0x92, + 0x5e, + 0x44, + 0x91, + 0xc5, + 0xe5, + 0x8d, + 0x4b, + 0xb3, + 0x50, + 0x6e, + 0xf8, + 0xc1, + 0x4e, + 0xb7, + 0x8a, + 0x86, + 0xe9, + 0x08, + 0xc5, + 0x62, + 0x4a, + 0x67, + 0x20, + 0x0f, +] diff --git 
a/test_programs/execution_success/brillig_blake3/src/main.nr b/test_programs/execution_success/brillig_blake3/src/main.nr new file mode 100644 index 00000000000..05a5b31f936 --- /dev/null +++ b/test_programs/execution_success/brillig_blake3/src/main.nr @@ -0,0 +1,6 @@ +use dep::std; + +unconstrained fn main(x: [u8; 5], result: [u8; 32]) { + let digest = std::hash::blake3(x); + assert(digest == result); +} diff --git a/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml b/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml new file mode 100644 index 00000000000..495a49f2247 --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256k1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_ecdsa_secp256k1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_ecdsa/Prover.toml b/test_programs/execution_success/brillig_ecdsa_secp256k1/Prover.toml similarity index 100% rename from test_programs/execution_success/brillig_ecdsa/Prover.toml rename to test_programs/execution_success/brillig_ecdsa_secp256k1/Prover.toml diff --git a/test_programs/execution_success/brillig_ecdsa/src/main.nr b/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr similarity index 52% rename from test_programs/execution_success/brillig_ecdsa/src/main.nr rename to test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr index 23f017aa336..5d84d885567 100644 --- a/test_programs/execution_success/brillig_ecdsa/src/main.nr +++ b/test_programs/execution_success/brillig_ecdsa_secp256k1/src/main.nr @@ -2,10 +2,15 @@ use dep::std; // Tests a very simple program. 
// // The features being tested is ecdsa in brillig -fn main(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) { +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); } -unconstrained fn ecdsa(hashed_message: [u8;32], pub_key_x: [u8;32], pub_key_y: [u8;32], signature: [u8;64]) -> bool { +unconstrained fn ecdsa( + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] +) -> bool { std::ecdsa_secp256k1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) } diff --git a/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml b/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml new file mode 100644 index 00000000000..0a71e782104 --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256r1/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "brillig_ecdsa_secp256r1" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml b/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml new file mode 100644 index 00000000000..a45f799877b --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256r1/Prover.toml @@ -0,0 +1,20 @@ +hashed_message = [ + 84, 112, 91, 163, 186, 175, 219, 223, 186, 140, 95, 154, 112, 247, 168, 155, 238, 152, + 217, 6, 181, 62, 49, 7, 77, 167, 186, 236, 220, 13, 169, 173, +] +pub_key_x = [ + 85, 15, 71, 16, 3, 243, 223, 151, 195, 223, 80, 106, 199, 151, 246, 114, 31, 177, 161, + 251, 123, 143, 111, 131, 210, 36, 73, 138, 101, 200, 142, 36, +] +pub_key_y = [ + 19, 96, 147, 215, 1, 46, 80, 154, 115, 113, 92, 189, 11, 0, 163, 204, 15, 244, 181, + 192, 27, 63, 250, 25, 106, 177, 251, 50, 112, 54, 184, 230, +] +signature = [ + 44, 112, 168, 208, 132, 182, 43, 252, 92, 224, 54, 65, 202, 249, 247, 42, + 212, 218, 140, 129, 191, 230, 
236, 148, 135, 187, 94, 27, 239, 98, 161, 50, + 24, 173, 158, 226, 158, 175, 53, 31, 220, 80, 241, 82, 12, 66, 94, 155, + 144, 138, 7, 39, 139, 67, 176, 236, 123, 135, 39, 120, 193, 78, 7, 132 +] + + diff --git a/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr b/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr new file mode 100644 index 00000000000..9da07f531aa --- /dev/null +++ b/test_programs/execution_success/brillig_ecdsa_secp256r1/src/main.nr @@ -0,0 +1,16 @@ +use dep::std; +// Tests a very simple program. +// +// The features being tested is ecdsa in brillig +fn main(hashed_message: [u8; 32], pub_key_x: [u8; 32], pub_key_y: [u8; 32], signature: [u8; 64]) { + assert(ecdsa(hashed_message, pub_key_x, pub_key_y, signature)); +} + +unconstrained fn ecdsa( + hashed_message: [u8; 32], + pub_key_x: [u8; 32], + pub_key_y: [u8; 32], + signature: [u8; 64] +) -> bool { + std::ecdsa_secp256r1::verify_signature(pub_key_x, pub_key_y, signature, hashed_message) +} diff --git a/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml b/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml deleted file mode 100644 index 071254266f4..00000000000 --- a/test_programs/execution_success/brillig_set_slice_of_slice/Nargo.toml +++ /dev/null @@ -1,7 +0,0 @@ -[package] -name = "brillig_set_slice_of_slice" -type = "bin" -authors = [""] -compiler_version = ">=0.19.4" - -[dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr b/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr deleted file mode 100644 index c0e9c7d172f..00000000000 --- a/test_programs/execution_success/brillig_set_slice_of_slice/src/main.nr +++ /dev/null @@ -1,51 +0,0 @@ -struct Property -{ - key : [u8], - value : [u8], -} - -struct JSON -{ - doc : [Property] -} - -unconstrained fn slice_eq(self: [u8], other: [u8]) -> bool { - let mut equal = true; - for i in 
0..self.len() { - if self[i] != other[i] { - equal = false; - } - } - equal -} - -// This test acts a regression for issue #3476 -unconstrained fn main() { - let mut json = JSON { doc: [] }; - let mut prop = Property { key: [], value:[] }; - - let other_prop = Property { key: [0, 1, 2], value:[10] }; - json.doc = json.doc.push_back(other_prop); - - for i in 0..3 { - prop.key = prop.key.push_back(i as u8); - } - prop.value = prop.value.push_back(5); - - // add property to json or replace existing - let len : Field = json.doc.len(); - let mut found = false; - for i in 0..len - { - if (!found) - { - if (slice_eq(prop.key, json.doc[i].key)) - { - json.doc[i].value = prop.value; - found = true; - } - } - } - assert(found == true); - assert(json.doc[0].value[0] == 5); -} \ No newline at end of file diff --git a/test_programs/compile_success_empty/brillig_to_bits/Nargo.toml b/test_programs/execution_success/brillig_to_bits/Nargo.toml similarity index 100% rename from test_programs/compile_success_empty/brillig_to_bits/Nargo.toml rename to test_programs/execution_success/brillig_to_bits/Nargo.toml diff --git a/test_programs/compile_success_empty/brillig_to_bits/src/main.nr b/test_programs/execution_success/brillig_to_bits/src/main.nr similarity index 100% rename from test_programs/compile_success_empty/brillig_to_bits/src/main.nr rename to test_programs/execution_success/brillig_to_bits/src/main.nr diff --git a/test_programs/execution_success/debug_logs/src/main.nr b/test_programs/execution_success/debug_logs/src/main.nr index 6accdf725d9..52c910065c1 100644 --- a/test_programs/execution_success/debug_logs/src/main.nr +++ b/test_programs/execution_success/debug_logs/src/main.nr @@ -39,7 +39,26 @@ fn main(x: Field, y: pub Field) { let struct_string = if x != 5 { f"{foo}" } else { f"{bar}" }; std::println(struct_string); + let one_tuple = (1, 2, 3); + let another_tuple = (4, 5, 6); + std::println(f"one_tuple: {one_tuple}, another_tuple: {another_tuple}"); + 
std::println(one_tuple); + + let tuples_nested = (one_tuple, another_tuple); + std::println(f"tuples_nested: {tuples_nested}"); + std::println(tuples_nested); + regression_2906(); + + let free_lambda = |x| x + 1; + let sentinel: u32 = 8888; + std::println(f"free_lambda: {free_lambda}, sentinel: {sentinel}"); + std::println(free_lambda); + + let one = 1; + let closured_lambda = |x| x + one; + std::println(f"closured_lambda: {closured_lambda}, sentinel: {sentinel}"); + std::println(closured_lambda); } fn string_identity(string: fmtstr<14, (Field, Field)>) -> fmtstr<14, (Field, Field)> { @@ -79,3 +98,4 @@ fn regression_2906() { dep::std::println(f"array_five_vals: {array_five_vals}, label_five_vals: {label_five_vals}"); } + diff --git a/test_programs/execution_success/double_verify_proof/Prover.toml b/test_programs/execution_success/double_verify_proof/Prover.toml index 3e6d996d0e1..dff48212e50 100644 --- a/test_programs/execution_success/double_verify_proof/Prover.toml +++ b/test_programs/execution_success/double_verify_proof/Prover.toml @@ -1,12 +1,5 @@ -input_aggregation_object = ["0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0", "0"] -# key_hash = "0x17a5d2b205c1bf45b015ba33bc2f0beb7fbb36682f31f953b8d4d093c8644be5" -# proof = 
["0x0000000000000000000000000000000000000000000000000000000000000003","0x0000000000000000000000000000008f66908323784e7c5259f4eefab77ca881","0x0000000000000000000000000000000000109cac7b943f9b737d7b023d4f5d8a","0x000000000000000000000000000000e991d3ac0a68a252bd3cd09cd1b43fe1b4","0x000000000000000000000000000000000014213d346a426777351fdabaa0fa26","0x000000000000000000000000000000e4603692a76af630015380b08d0e13c239","0x0000000000000000000000000000000000149e7046461203c1b72392bb93c262","0x000000000000000000000000000000c27ffc719f223ca6ef5476a5b66f03a4a8","0x000000000000000000000000000000000003718c62098243e3c1f38090e61753","0x000000000000000000000000000000749492aa98716ce5bf7c06e5c2a0a8a528","0x000000000000000000000000000000000018e4c7d33848bccdc3eed924bfaa15","0x0000000000000000000000000000004e10a37f41fd7c4fe32982daa498530d62","0x00000000000000000000000000000000001b76c8c59489c63f11280187109dd7","0x0000000000000000000000000000002a6cd84d3b8537a7c3cb0cb9461f02e4bb","0x0000000000000000000000000000000000197e524fd48ca5ccb30d6c5ffe689d","0x0000000000000000000000000000000013bf25498ce1f51078c06dac450c0325","0x000000000000000000000000000000000018d347b88a0c32e32571deb9b40466","0x00000000000000000000000000000060d496191298eb1b1c2ce18f9a4afcfc55","0x000000000000000000000000000000000024e11b8e8fcb45b8628cb9cc565513","0x00000000000000000000000000000004e976f6d12fff6250eea2d21c570d3d6a","0x00000000000000000000000000000000000967dbd89d2c7dc0121ea71ded7203","0x000000000000000000000000000000d96f810588c0daa43e88d765a3f82ea9b7","0x00000000000000000000000000000000001f69d7015fe6694bd1d4d61049dae9","0x000000000000000000000000000000c539910d0f81a890fa3d996a676db39640","0x000000000000000000000000000000000026d8b64020a669e24f740b4eba633a","0x000000000000000000000000000000c53cc90f99c40eb5d449f38180d9e9c8b6","0x00000000000000000000000000000000001071ddf2bacc2367dfb2c5084b7dd1","0x0000000000000000000000000000001b9791181eb174db1a50d903fa9fea9999","0x0000000000000000000000000000000000118c059d41a95311a5c361c6a9a00d
","0x0000000000000000000000000000003caf4ad04551a3ffba19cc6a1fff457370","0x00000000000000000000000000000000001dc4d8be804c5289fbf54183f93149","0x00000000000000000000000000000050766764bb82799df5172043c515956263","0x00000000000000000000000000000000000a5849adbac9c33e53571b29aab672","0x0000000000000000000000000000002edb078e589d44ac93e283680b34adf574","0x000000000000000000000000000000000015e9e187c4fb683ca78d52a2a0301b","0x00000000000000000000000000000048ac0f1db3575ed0f84d61ab6cbdd53d9f","0x00000000000000000000000000000000002ddc4243fbc7104347d29a823194ae","0x00000000000000000000000000000070ad92aeea2bdea4277ffdfa3d3ed93443","0x000000000000000000000000000000000003bad3e3aae806c278094cb682a8e0","0x000000000000000000000000000000fb74b99eb44c80d8f7ba83d7e9e2efa5c0","0x00000000000000000000000000000000002819cc14e399c1dadc4f921e2a58fa","0x000000000000000000000000000000e3938bb3e7866c6499ec44fb72549efca0","0x00000000000000000000000000000000002d8264d5cdc2109da12e1864aca147","0x000000000000000000000000000000b12d7828cacbe86350f0b171b0cb0d1cd4","0x0000000000000000000000000000000000244155cecb315326f05641cac9475c","0x070b059f9471e22eed5a9ea08093dba3c59c941634611884c5f0f1a1a6b93e5c","0x118124ada70b46c7d23a6ca8b90d545f30e028b1689fe5c55c86bf55f42e0401","0x25dca6ad78c03ce1f7783cc39a5ea5ef90b318d5edf4f1367d4451c1db3c113e","0x0d9557b4e661b5c53b84fcb41f05d15c0ca112430db16f56d0ab54032fffe734","0x06aedf13a3b182784f4d64162f4706759f95e42fc8dc17d1b8b5f551dafdc007","0x132f97ab5f1f8817689b17e336125c5273d6970a1b3b0901fd26d193a4d2dce4","0x1b0c9980b5b09343e807d82bad307a06d1bfadcd1fa50be666c2907d31ef43e1","0x1ce7000cb24ecc1f2ff9d9507b2290513fed574a84d893811cb54a3c0bc51ccc","0x2e1df58d36444c2dfda98991847422f56ef66f079d26eb7f8110d0d7c46b2c0c","0x166c2f821be7c3e3e4440980e73770840194f14d003778b7fbcdd2690776255c","0x1ae8390287e2eb117851a5011575ba3801e5ee5c66a8f7432e2a2fb13c276008","0x047c09806bfb272d940d9b802e3e49b40050fd0f66717e8b325c5d4834b13aac","0x08f81300d7f64e5b281b37005c7c936800a1fa1ecce2fd1664b8ba906962755
8","0x2ed7260e623b68d580304751341bb72141314b881e9e8dade626bf5cde8a077c","0x23e04c035fd9396ca06cdc0171f24da00287e87b338bf45992e2ea41a589d560","0x285c5583cbd4609a347a7781a030975402d8e58a99fd72e4c53f4789da3b100c","0x2cd85f0437cf87c7c8881301ce6ee1080329e29a006ef16ff79ba4d20eec4ab8","0x12eb74da267caf98c494db16c87f90f510fdca1f8095b40156a6f0bb066e3400","0x2267004535c434df4cbee1a356e48b1f317cb849ac69c3eb94e377d2274f1e08","0x2c9d4ce9d1d8b8cf1cb90cbc3e121f570c8260c53b48ed2070d474d5a6f12c4e","0x2c6c83ffaad6f30da5aa696973ccfbd0337cb7a5e5f9e5fc8e826dce21e8f51c","0x056c23922e9435f93853044ba96a1c12db97f47053987df5187269ce884ec00f","0x09e82d129a8f5d26cc609fcbd7a865c6dbe8f17fba09fc804acec716bcfffabb","0x0e643693068a8454606e3b4c05e6af7adc39ee8f207b7b0b7d2b245ef1b13567","0x12e040137285ab81f47bd6cc3424f92edc8aeb9e86ecf996af8781a726627013","0x00f01a11c2990ecba44568cb7b2bd25edb46f760ed26ff69e6160c86539d8563","0x28a91699dfa4e85e18e8621d39a147a40930701d2d88546e01adb71a1f8e407f","0x000000000000000000000000000000009d7cc0b2d2bdef816f4fb17e7a6f6c08","0x00000000000000000000000000000000bcfc1a7030171f681f2c6e97c61f4e70","0x00000000000000000000000000000000dc7b742d8d704f4ecf092bb111cf30d8","0x13b099dc4869006fde9df04bf36f4c8f08d4491cc6229ac36a98f93214c79b6a","0x008fa95e0d431d617d8d3288fde7f8bbe36492019943e2018564633528575892","0x0fc66c06bdff20dba4dc491d5cd13cc209c4d2d9e29802db665bb397c2a4e754","0x0fe48ae6623efbaadce6d6b75b87be6caa19c2fd4d94a74149ceb6bcb88251e1","0x1bb41738028855cb5e0085edcd62cff208121427ea19a57425a0cf6bb68deb93","0x0fbc646333ddc21ab1a77b01a35973a56d5a617c482a21a231497fd3cc9b74c1","0x19ab9eaa1a902faff2dd9baa19ff00cea9086baa8c28bcdb95f7a3549eaf09b4","0x25e2b7a7643df4d964cd787b593888b00abfe3ce79e8deaa6d68fd1686b84bcb","0x2d134d7eea07414451e54854d61d5b71245434d0638bba9a1184914f65f2521c","0x03df94e38e9eed8586acd277d180d5d515b49d89d37525f871be2ff4552c586c","0x0b102abb146839f073c4a2514e65a8962f48ee8bbd1801e815d9c42d34665ebd","0x000000000000000000000000000000b7a4109cb92b514401fb63667454a9c8
92","0x0000000000000000000000000000000000016fce7f8ef56fef466636f3fbc3de","0x00000000000000000000000000000005f2d1c401a7aa14ac7e9fce7c21ec2e1a","0x00000000000000000000000000000000000621322c74c5d0da5eb71a4f2b046f","0x00000000000000000000000000000073d774ad7f61b1c1b93800f7838cca6bde","0x00000000000000000000000000000000002d603cc025e6af192394df113d4677","0x00000000000000000000000000000066a2a702b4d4b1a24af9c56cacb18ae4b8","0x00000000000000000000000000000000000124a3c25b427cfb6fca07525c5b8d"] -# public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] -# verification_key = ["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000
000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235cd8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000
000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x000000000000000000000000000000000020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x00000000000000
00000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x00000000000000000000000000000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x0000000000000
0000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] -# proof_b = ["0x0000000000000000000000000000000000000000000000000000000000000003","0x0000000000000000000000000000002ab91b132e624f2a408aa8c9bf31cca8d7","0x000000000000000000000000000000000015ad57528e0f065c820cc5ad4eab81","0x0000000000000000000000000000001acb78b1b6a5c9a6ec8bf2272b463014da","0x0000000000000000000000000000000000117fd65346e04bf3666d2ab3f24c90","0x000000000000000000000000000000aad0adaf9a768ba6a178f804edac5c8943","0x000000000000000000000000000000000004a11c7d31f25c20e3af16f9b01f71","0x0000000000000000000000000000001f0ae9bb921893ce2710148eb1fcd99e39","0x0000000000000000000000000000000000123fda5008d3709f5afeda01de1930","0x000000000000000000000000000000971c2a8d0119097fd82b7a8074a14853f8","0x000000000000000000000000000000000009965b998750710678da7891d8aba6","0x0000000000000000000000000000002d6ef3813ba14a5f5202afed6b1c41de1c","0x000000000000000000000000000000000020366bfdb2f9279c43d66f90dfdf4d","0x00000000000000000000000000000041389f221eadec33e1b87518668c3bc92e","0x00000000000000000000000000000000000d3858169bb0432ab761d4be8ef03e","0x000000000000000000000000000000c1dbfe670dc912cb0fa1a0f633f81a4cef","0x00000000000000000000000000000000000fc0c403e668b0f51e07089082c32f","0x0000000000000000000000000000009a4fba9bf1369f637fd295c8bf795c9d02","0x00000000000000000000000000000000001d6d1e7286ce52401e6ea79d2cfa3d","0x0000000000000000000000000000004762bf7702ffe7a2c147e704280cd50bba","0x0000000000000000000000000000000000205797cdeaeff9a8d5ea4b95d41b1a","0x000000000000000000000000000000b3d43cc863ba8d98f51118c0db70761079",
"0x00000000000000000000000000000000002d2a3d10381bc6b47a693c1692b1b6","0x000000000000000000000000000000d35a69fb0e68729f71e651799c0d19e9eb","0x00000000000000000000000000000000002ade1dc7741b7f397271c10e596557","0x0000000000000000000000000000001a67b44714687085004e4142f700043298","0x00000000000000000000000000000000001bb7bbb7f45876b1d72e5d20cee106","0x00000000000000000000000000000025f1f1cbf43fad70cba255b37a19e88b0c","0x00000000000000000000000000000000000cc46b215fbd8e4b233cc74aab250b","0x0000000000000000000000000000008168026f51135fc1670664bc50e629917f","0x000000000000000000000000000000000004d822d80ba0c1bcbd4b000573c6f9","0x000000000000000000000000000000d85756249b937277eba3f5dcb89c56e7bb","0x000000000000000000000000000000000019a3a7a5b20dac138d7ddb1d499134","0x0000000000000000000000000000007621614c7ebc31a2177011f9da01668eb3","0x000000000000000000000000000000000024e9beb5d616ab120073170fc431e8","0x00000000000000000000000000000031fbf901896e958fbbed3e5c57aebbdd04","0x0000000000000000000000000000000000005ac0f10fcc255e179a40518875d4","0x0000000000000000000000000000002dab820c019bcca563b7dbdd26974653e9","0x00000000000000000000000000000000001a5655ec1a67f722b14c65d5c2197f","0x0000000000000000000000000000008e277e490196db5c19d09a9034e10c6432","0x000000000000000000000000000000000003f13b1af07db07eec88698d0aaf2a","0x0000000000000000000000000000002d618452e2b4c790d0551ea5863ed62e76","0x00000000000000000000000000000000001a7171e790a433a972d80218fb482d","0x0000000000000000000000000000005669975cd5bf65a739c0a35a8ab9b7963b","0x00000000000000000000000000000000000d27ffb6f00c86a0ce76a8067d1bce","0x03a0054fe9f93ab96e7c7ed6ec1ac641dffd99a1c804ee5db52cf1efa1a12c15","0x059324381c89c12c87d0f6c27963c31647721fdb02c125961da1a21cbfb3ed1c","0x04a5ead891b7c3f30329e6abcf2ac6903c3c1d8e68874f6baf3a6fc00e84533a","0x03c02f6b862734acf9d0c5133f8141b3a008c5499336a588b376a5dd86d9c837","0x1dd26b35c21c584c410df89d1fd549e7f5da9bb4fd290b7c528d92fbd652f5ad","0x2c8e7ef6f7a130769ae74d0f47aeab5c443492ef4b1ed0b3a9d61dfca80cbdda"
,"0x2b074486c21c62e6eccf3191b3ab3c8df0fb98f0c44b9f0e9e2c281b908b83a6","0x149a6d620be135bba6bbfe8ac826df37567c8be78007e47cdcf5d6e4683d339e","0x119fdfd330036bde31af71e43bd5e191460605e4760d08a6e0ebddbdb5abfeeb","0x1713efc63c00b2de4f68e696d9d30c5603963484f4829e716de2796640864b09","0x1bb1862114cda3712c177b1b6bca0ecd9de7723925698aee83dc91ade7078d3e","0x049d965ad8ccf092dcae948491f702779a513db430e6ec7d15fa1847a6814235","0x093b2cb5b199e125b95d290923ee04ef34a27b6861cdd8fa2bf4308f4d02846a","0x2710c6cd6f14f8071976509d1002e915bfc182b843a3967134de380302423c72","0x24ecb2d6c6678496e574a4248fb813bcd289eda1873763e8afd0c23d340a11a8","0x298a49319f347529c22338a921af16346cdb2b55b81e5065c5cada84da8b53dd","0x2e27df8c780165bc9ed1cd2db3a618ac072c6909e9053ce2dbc4f2cc810c9612","0x07350f3a2e23c1ccbde0d39370159060de5b8df40ae7c58d3f9852b371f1adac","0x2fdf8bf8e2fa2acad0f6d6a3f73e7dc516e8e2d167128bf3a560972339122835","0x0d3ec457703c228d4b6cd1635df9d9bde51997d0228edef64d667cbd16f3cb70","0x148320b9ceab1f3be840dc38b0344e7db0755283d1eacf2dd472e99ee0fb473f","0x06febdcf4869a6b89fdeb0805612c53e139afc29d119a54bc3d72dc7de0f1a7b","0x0b9c542a2136974b7c8d4504e809c7b5adec39de020091c8d9d1460f84905cb0","0x1039ea84fa0387de593bd9897a00ca2d483d779232e77e45efcb5e572b119ee5","0x14d780dfd2d0787135ea6e0e0bf7cca4e28eb54663ce6ac305c5769ed192e11a","0x026127746f9cb625c3301bfbc41bc2c67949be75a032b8ceaddd1580378dd846","0x123cf1180af5fdf09444de423947c9a71790f2c85468b51ecc25fb7bf075a0d5","0x000000000000000000000000000000008419a4f769ceb72c3ac28f559331a5df","0x000000000000000000000000000000009e852c5c1891a89b79b64599e3d52d72","0x00000000000000000000000000000000b8f0b3c0c7549a0ab8a9fbde3478b505","0x056af493dda97ae84cdbbf9ce379e35bdd66e1223eebacdc4a6c2c92553604f4","0x023624c49a722bc8dc5d945b4b10be8ed6c608020e65038a470b5a407375c8aa","0x0ed9f8dd445bda548ef08b7a2ff233867c41b72786f98054597833a68cc9b900","0x2cbf3d04669aa3a0dcda95e19da879f36029abe28317f1ee69be28ddef2a0b87","0x284ca7049611e293aa4535dd7841a540996609d541814373b387b00069636f14
","0x246a69ce4030b1e8a675eec89960bfe188bd4073f07afe74f7a77c0698c80bc5","0x1bbdab5d007c4743fbcbf3cc89252baf0b0e1b645b977434ccd4e7560d124761","0x210427e70ee1b484bbb0b4e98263faf24a45325236eed618d51dcb1cb3a9f60d","0x1fbc24b0bd5b0b8c514e138317cc332962714dd306b34939768d723d6ea2ca8e","0x1e74217a6bd46293e6eb721cad346b607a9d6953d677bc5a17fd195e299b9f0f","0x1d2c1e441a4db99b7c88d0b6429ca39792c984d4a63c2f7ab96cc07ee4947390","0x00000000000000000000000000000005b1e3524625c466540f3f7468172403cb","0x000000000000000000000000000000000013bb985f9d5562699347b5dfbc441e","0x000000000000000000000000000000f4fb87d7f481bb198aa6237a0c9ffd3c22","0x0000000000000000000000000000000000254c5f1b76e278f4c71cf5e71533dd","0x0000000000000000000000000000005a72a28b51be9c538b4d28b5106b9239b8","0x00000000000000000000000000000000000d02d80e1a73c82cb0dd8af1aabb3f","0x000000000000000000000000000000434c46502fc1c425a72a4717a3e44c3415","0x00000000000000000000000000000000001c8d74d967b9b65ff2772592a15d0e"] - key_hash = "0x096129b1c6e108252fc5c829c4cc9b7e8f0d1fd9f29c2532b563d6396645e08f" -proof = 
["0x0000000000000000000000000000000000000000000000000000000000000003","0x000000000000000000000000000000d62b795bec274279129a71195796825fcc","0x00000000000000000000000000000000000793ab763140f20a68a6bd2721fd74","0x00000000000000000000000000000053141d06d3307b36153f321511199e579c","0x00000000000000000000000000000000000a4b55d6c21f98a9c434911dcb5c67","0x0000000000000000000000000000005f9d324c0abd22cec92d99dbec438e9491","0x0000000000000000000000000000000000240dfafe1b53dc27147cbab14ea893","0x000000000000000000000000000000044a61d3aac32c6931247cf334a19d9611","0x000000000000000000000000000000000003f0f8cf4207bfa85c23ec9f8d0c88","0x00000000000000000000000000000002168a470e39ba2ac266f6b474de12045f","0x000000000000000000000000000000000025791e7d3feab542345c00ec5a30df","0x000000000000000000000000000000dcafd76d4c3640969c80e017b951ef6397","0x00000000000000000000000000000000001d27f75a1256771e88e0c86fc42dbc","0x0000000000000000000000000000007347ae7d2d9d7fc2b8f0baa014ee1fed9f","0x000000000000000000000000000000000018bd927f42bf7caf9555f56f09000d","0x000000000000000000000000000000041f765f83cbe5904c8f453f70a4531d10","0x00000000000000000000000000000000001858aabeeb5331a221419f4fed1c19","0x000000000000000000000000000000d254a54caaedf8287b9af951b2f2611121","0x000000000000000000000000000000000005ab493623c9563cf2e55ba5f18200","0x00000000000000000000000000000014f24cddc1a02440dc63637df8032c8074","0x000000000000000000000000000000000011950c16cef98471b1d78b935195a4","0x000000000000000000000000000000b0340b459e6bd5cc8f031c8654a502897f","0x00000000000000000000000000000000000e1cf3968dac4545a76a2ae58e512c","0x0000000000000000000000000000002adf7218aa06ca0d2c2e600dcc39193a2d","0x00000000000000000000000000000000001302e7e4b0f14749bd885ca25588b6","0x00000000000000000000000000000092009ce4056e79ab815d8cdfd4491138ae","0x000000000000000000000000000000000018af11e853c6cf2f0f6274b0da8133","0x000000000000000000000000000000dd3dc6f49232141718527b3a0e4b26e21d","0x00000000000000000000000000000000001a877853348a8b695c4f9a9aa4ce68
","0x000000000000000000000000000000aecfc56ba07155450b368140d6324023b5","0x000000000000000000000000000000000029c11052798c57ece614617d33fcc2","0x000000000000000000000000000000eb106ffc816d16fb84e84b0b61157b2603","0x000000000000000000000000000000000026c3cac16206899a21cb5126841446","0x000000000000000000000000000000a782ed54805fe845068b362b58e2fa34ec","0x00000000000000000000000000000000000cf046a1bfcc666b7f28b572676073","0x000000000000000000000000000000b931c8dda60bb4aca4cc817f5540f1209f","0x000000000000000000000000000000000024ad50c3936fafc3d190e6a4874223","0x000000000000000000000000000000cce90cfbaf5671c8c8652db28a3a9566f7","0x000000000000000000000000000000000003574db9d0f84380c9635660f86354","0x0000000000000000000000000000003eb3e1dc31846a90f721e7a08c6d6dc4f7","0x000000000000000000000000000000000028999a700cd1abae1a288eebb9a91c","0x000000000000000000000000000000c1be4d385b11387e14eb9817050d772f78","0x000000000000000000000000000000000003c56b5bad8b4484c66ac921f1f102","0x000000000000000000000000000000ace245cabf0f00dc7fd253dd8af0377a14","0x0000000000000000000000000000000000107f1731fcf34b364c813599fa1df7","0x035b937d404932b542b706eb810ef4a7dca4566d4dde1ad6a8717f46167ead7e","0x17608cef3dc7960f41cb1295706df663727d45ee598a61e05e989d111449fb65","0x054712a950ad67da3aa860e49e6891f99b586b7f37caff94eb013fdb374b61ee","0x04b755083086c769b7f593e0e48d68dc54be808203351380ca5566a48149d8bb","0x17d7670b0915235f626fdc1d7e1134d2be906ef138d7843384b3ebc23b1d630f","0x064cf544ab5f4e3dab47960502cccc83321fb275068dfbdd3a2fcbc6dddcaa65","0x083338262712e2b66769ea40d9f412b18caa1bc81a51ff5a50b6c41f8c4b3d23","0x0cdd38958cab97defde00f4a5961b6fd676e29d9f2c352f6bb2c68b91f83f8af","0x02c8bdd005c2f43a0a8cbb2744916ce5c322dfa5b23367a829c12699f4036d32","0x25bac73c7e7b659fbea3135b7a0decf9db8dc3045bd2837dae337c64cc722546","0x19eb361aa419d37bce3d2e8b2b7692a02a9559e83d7f3d8fe9169970fbbc2cba","0x2494bd5106d00e05c7ea60e632e9fe03773b7f2c5b662aa37ec512a01f4a0775","0x18c52c2f2c6e7be1d7847c15e452a3a9c64316103d12e4b5b9a82fac4e940ee
9","0x0e0342810456ef78f498c1bfa085a5f3cbc06db1f32fabd0ea9ad27dccac1680","0x024c13d6ef56af33ed7164ea8e47ddecc8a487b000d8b1b45edcd3895a503ba2","0x26e0d127f626bd39b55bc5d0c131dbf03fe006dc5c3edc57dda1e629799a4317","0x1b1140061bc52b15c4f5e100729a81968ee79dc03deb966a18850335a8e44a8b","0x1bb76f945199e71d531a89288912087a02dd0e83020e65d671485bf2e5e86e1a","0x29269900859c6d86e404185b415bf3b279cd100f38cfdb0077e8d6a299c4fd35","0x22b5e94bae2f6f0cdb424a3b12c4bf82cec3fb228e012c1974ed457827bbe012","0x18d3543a93249778e7a57936170dae85ffc47c2567f2d0076a32c0bb86fcf10a","0x03721dc2670206cde42a175fd56bcce32cf6cb8801450a8e8e4b3d4e07785973","0x2806db136dd214d3ac1478460855cae6a4324ab45cab35320d104fee26c260e8","0x1c3749f1937082afbbae9375b9be708cf339e1983e57ef4447f36cfa560c685c","0x1067b8cfb90ef08bcb48aea56b2716334241787c2004a95682d68a0685566fd0","0x0f41aee4416398f1d48ffc302403273cddef34a41f98507c53682041d82e51ff","0x10d854c9f0bfbdff7ca91a68f4978e9a79e7b14243d92f465f17bdf88d9f64f8","0x00000000000000000000000000000000018938b11099e0cdc05ddab84a153a97","0x0000000000000000000000000000000001d7dda1471f0dc3b3a3d3438c197982","0x00000000000000000000000000000000022682917da43ab9a6e9cbcece1db86d","0x2453913e6b0f36eab883ac4b0e0604d56aaeb9c55e641135173e63c342f1a660","0x05216c1b58dc43a49d01aaba3113b0e86be450fc17d28016e648e7162a1b67fb","0x152b34845a0222a2b41354c0d395a250d8363dc18748647d85acd89d6934ec56","0x1dfc6e971ce82b7dcda1f7f282713c6e22a8c79258a61209bda69719806da544","0x2968dd8b3af8e3953f1fbbd72f4c49b8270597bb27d4037adc157ac6083bee60","0x1b9425b88a4c7d39b3d75afe66917a9aa1d2055724392bc01fb918d84ff1410e","0x04ab571f236d8e750904dc307dd274003d9130f1a7110e4c1521cfb408877c73","0x2ad84f26fdc5831545272d02b806bb0e6dae44e71f73552c4eb9ff06030748c7","0x020e632b99d325db774b8630fb50b9a4e74d35b7f27d9fc02c65087ee747e42c","0x09a8c5a3171268cb61c02515c01c109889200ed13f415ae54df2078bbb887f92","0x1143281a9451abbb4c34c3fa84e7678c2af2e7ea8c05160a6f7f06988fc91af8","0x000000000000000000000000000000cbda736ca5cf6bc75413c2cc9e28ab0a
68","0x00000000000000000000000000000000001ee78c9cc56aa5991062ae2e338587","0x000000000000000000000000000000bc9bfcdebb486f4cb314e681d2cc5f8df6","0x00000000000000000000000000000000000ad538431d04771bca7f633cb659ff","0x000000000000000000000000000000d45b317afcefa466a59bba9e171f1af70c","0x0000000000000000000000000000000000133c50180ea17932e4881124e7a7c6","0x000000000000000000000000000000fc9ed37f543775849f3e84eaa06f77f992","0x00000000000000000000000000000000001372873c9c051d1baff99248b8f70e"] +proof = ["0x000000000000000000000000000000d62b795bec274279129a71195796825fcc","0x00000000000000000000000000000000000793ab763140f20a68a6bd2721fd74","0x00000000000000000000000000000053141d06d3307b36153f321511199e579c","0x00000000000000000000000000000000000a4b55d6c21f98a9c434911dcb5c67","0x0000000000000000000000000000005f9d324c0abd22cec92d99dbec438e9491","0x0000000000000000000000000000000000240dfafe1b53dc27147cbab14ea893","0x000000000000000000000000000000044a61d3aac32c6931247cf334a19d9611","0x000000000000000000000000000000000003f0f8cf4207bfa85c23ec9f8d0c88","0x00000000000000000000000000000002168a470e39ba2ac266f6b474de12045f","0x000000000000000000000000000000000025791e7d3feab542345c00ec5a30df","0x000000000000000000000000000000dcafd76d4c3640969c80e017b951ef6397","0x00000000000000000000000000000000001d27f75a1256771e88e0c86fc42dbc","0x0000000000000000000000000000007347ae7d2d9d7fc2b8f0baa014ee1fed9f","0x000000000000000000000000000000000018bd927f42bf7caf9555f56f09000d","0x000000000000000000000000000000041f765f83cbe5904c8f453f70a4531d10","0x00000000000000000000000000000000001858aabeeb5331a221419f4fed1c19","0x000000000000000000000000000000d254a54caaedf8287b9af951b2f2611121","0x000000000000000000000000000000000005ab493623c9563cf2e55ba5f18200","0x00000000000000000000000000000014f24cddc1a02440dc63637df8032c8074","0x000000000000000000000000000000000011950c16cef98471b1d78b935195a4","0x000000000000000000000000000000b0340b459e6bd5cc8f031c8654a502897f","0x00000000000000000000000000000000000e1cf3968dac4545
a76a2ae58e512c","0x0000000000000000000000000000002adf7218aa06ca0d2c2e600dcc39193a2d","0x00000000000000000000000000000000001302e7e4b0f14749bd885ca25588b6","0x00000000000000000000000000000092009ce4056e79ab815d8cdfd4491138ae","0x000000000000000000000000000000000018af11e853c6cf2f0f6274b0da8133","0x000000000000000000000000000000dd3dc6f49232141718527b3a0e4b26e21d","0x00000000000000000000000000000000001a877853348a8b695c4f9a9aa4ce68","0x000000000000000000000000000000aecfc56ba07155450b368140d6324023b5","0x000000000000000000000000000000000029c11052798c57ece614617d33fcc2","0x000000000000000000000000000000eb106ffc816d16fb84e84b0b61157b2603","0x000000000000000000000000000000000026c3cac16206899a21cb5126841446","0x000000000000000000000000000000a782ed54805fe845068b362b58e2fa34ec","0x00000000000000000000000000000000000cf046a1bfcc666b7f28b572676073","0x000000000000000000000000000000b931c8dda60bb4aca4cc817f5540f1209f","0x000000000000000000000000000000000024ad50c3936fafc3d190e6a4874223","0x000000000000000000000000000000cce90cfbaf5671c8c8652db28a3a9566f7","0x000000000000000000000000000000000003574db9d0f84380c9635660f86354","0x0000000000000000000000000000003eb3e1dc31846a90f721e7a08c6d6dc4f7","0x000000000000000000000000000000000028999a700cd1abae1a288eebb9a91c","0x000000000000000000000000000000c1be4d385b11387e14eb9817050d772f78","0x000000000000000000000000000000000003c56b5bad8b4484c66ac921f1f102","0x000000000000000000000000000000ace245cabf0f00dc7fd253dd8af0377a14","0x0000000000000000000000000000000000107f1731fcf34b364c813599fa1df7","0x035b937d404932b542b706eb810ef4a7dca4566d4dde1ad6a8717f46167ead7e","0x17608cef3dc7960f41cb1295706df663727d45ee598a61e05e989d111449fb65","0x054712a950ad67da3aa860e49e6891f99b586b7f37caff94eb013fdb374b61ee","0x04b755083086c769b7f593e0e48d68dc54be808203351380ca5566a48149d8bb","0x17d7670b0915235f626fdc1d7e1134d2be906ef138d7843384b3ebc23b1d630f","0x064cf544ab5f4e3dab47960502cccc83321fb275068dfbdd3a2fcbc6dddcaa65","0x083338262712e2b66769ea40d9f412b18caa1bc81a51ff5a5
0b6c41f8c4b3d23","0x0cdd38958cab97defde00f4a5961b6fd676e29d9f2c352f6bb2c68b91f83f8af","0x02c8bdd005c2f43a0a8cbb2744916ce5c322dfa5b23367a829c12699f4036d32","0x25bac73c7e7b659fbea3135b7a0decf9db8dc3045bd2837dae337c64cc722546","0x19eb361aa419d37bce3d2e8b2b7692a02a9559e83d7f3d8fe9169970fbbc2cba","0x2494bd5106d00e05c7ea60e632e9fe03773b7f2c5b662aa37ec512a01f4a0775","0x18c52c2f2c6e7be1d7847c15e452a3a9c64316103d12e4b5b9a82fac4e940ee9","0x0e0342810456ef78f498c1bfa085a5f3cbc06db1f32fabd0ea9ad27dccac1680","0x024c13d6ef56af33ed7164ea8e47ddecc8a487b000d8b1b45edcd3895a503ba2","0x26e0d127f626bd39b55bc5d0c131dbf03fe006dc5c3edc57dda1e629799a4317","0x1b1140061bc52b15c4f5e100729a81968ee79dc03deb966a18850335a8e44a8b","0x1bb76f945199e71d531a89288912087a02dd0e83020e65d671485bf2e5e86e1a","0x29269900859c6d86e404185b415bf3b279cd100f38cfdb0077e8d6a299c4fd35","0x22b5e94bae2f6f0cdb424a3b12c4bf82cec3fb228e012c1974ed457827bbe012","0x18d3543a93249778e7a57936170dae85ffc47c2567f2d0076a32c0bb86fcf10a","0x03721dc2670206cde42a175fd56bcce32cf6cb8801450a8e8e4b3d4e07785973","0x2806db136dd214d3ac1478460855cae6a4324ab45cab35320d104fee26c260e8","0x1c3749f1937082afbbae9375b9be708cf339e1983e57ef4447f36cfa560c685c","0x1067b8cfb90ef08bcb48aea56b2716334241787c2004a95682d68a0685566fd0","0x0f41aee4416398f1d48ffc302403273cddef34a41f98507c53682041d82e51ff","0x10d854c9f0bfbdff7ca91a68f4978e9a79e7b14243d92f465f17bdf88d9f64f8","0x00000000000000000000000000000000018938b11099e0cdc05ddab84a153a97","0x0000000000000000000000000000000001d7dda1471f0dc3b3a3d3438c197982","0x00000000000000000000000000000000022682917da43ab9a6e9cbcece1db86d","0x2453913e6b0f36eab883ac4b0e0604d56aaeb9c55e641135173e63c342f1a660","0x05216c1b58dc43a49d01aaba3113b0e86be450fc17d28016e648e7162a1b67fb","0x152b34845a0222a2b41354c0d395a250d8363dc18748647d85acd89d6934ec56","0x1dfc6e971ce82b7dcda1f7f282713c6e22a8c79258a61209bda69719806da544","0x2968dd8b3af8e3953f1fbbd72f4c49b8270597bb27d4037adc157ac6083bee60","0x1b9425b88a4c7d39b3d75afe66917a9aa1d2055724392bc0
1fb918d84ff1410e","0x04ab571f236d8e750904dc307dd274003d9130f1a7110e4c1521cfb408877c73","0x2ad84f26fdc5831545272d02b806bb0e6dae44e71f73552c4eb9ff06030748c7","0x020e632b99d325db774b8630fb50b9a4e74d35b7f27d9fc02c65087ee747e42c","0x09a8c5a3171268cb61c02515c01c109889200ed13f415ae54df2078bbb887f92","0x1143281a9451abbb4c34c3fa84e7678c2af2e7ea8c05160a6f7f06988fc91af8","0x000000000000000000000000000000cbda736ca5cf6bc75413c2cc9e28ab0a68","0x00000000000000000000000000000000001ee78c9cc56aa5991062ae2e338587","0x000000000000000000000000000000bc9bfcdebb486f4cb314e681d2cc5f8df6","0x00000000000000000000000000000000000ad538431d04771bca7f633cb659ff","0x000000000000000000000000000000d45b317afcefa466a59bba9e171f1af70c","0x0000000000000000000000000000000000133c50180ea17932e4881124e7a7c6","0x000000000000000000000000000000fc9ed37f543775849f3e84eaa06f77f992","0x00000000000000000000000000000000001372873c9c051d1baff99248b8f70e"] public_inputs = ["0x0000000000000000000000000000000000000000000000000000000000000003"] verification_key = 
["0x2b337de1c8c14f22ec9b9e2f96afef3652627366f8170a0a948dad4ac1bd5e80","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000005","0x0000000000000000000000000000000000000000000000000000000000000008","0x0000000000000000000000000000000000000000000000000000000000000001","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x0000000000000000000000000000000000000000000000000000000000000000","0x00000000000000000000000000000092139c61bae1a44f0fc7689507414be688","0x00000000000000000000000000000000000160ce4e279582f91bde4f03f5e9a2","0x0000000000000000000000000000005dc2d37f658c3b2d60f24740eb13b65d79","0x000000000000000000000000000000000007e3e8a5d98a1177ec85bf88f163a5","0x000000000000000000000000000000dc3035fbd7ff16412a8fd7da587a935298","0x000000000000000000000000000000000023d08e2817ac16990004ed11d8fc66","0x000000000000000000000000000000356a5ad59c646c746a8d09f5d154e47c4f
","0x00000000000000000000000000000000000708529196af3c8e16ffa580c26182","0x0000000000000000000000000000002ddfe70eb7a1280596e8e4a804f118a6dd","0x000000000000000000000000000000000013757e15a0905f298303784a161b21","0x000000000000000000000000000000a23a729df796935c7824e3a26be794829b","0x000000000000000000000000000000000005775b6c146c4a59856e869fe5a70e","0x000000000000000000000000000000eef0c9e088fd2d45aa40311082d1f2809b","0x00000000000000000000000000000000001d539ccbfc556d0ad59307a218de65","0x000000000000000000000000000000a2c848beceb6ab7806fd3b88037b8410fc","0x0000000000000000000000000000000000177004deeb1f9d401fd7b1af1a5ac8","0x0000000000000000000000000000002508eb63672a733f20de1a97644be4f540","0x00000000000000000000000000000000000d82d51f2f75d806285fd248c819b8","0x000000000000000000000000000000d002f9100cbba8a29f13b11513c53c59d0","0x000000000000000000000000000000000006cd3b0e3460533b9e5ea2cdc0fcbb","0x000000000000000000000000000000f45ea38a93b2f810c5633ddb54927c1c96","0x000000000000000000000000000000000021791de65f9a28ec7024b1a87ab4f3","0x000000000000000000000000000000926511a0439502c86885a8c6f0327aa7ad","0x000000000000000000000000000000000029fa14a969c5d81ed3abbbfb11220a","0x000000000000000000000000000000b84c3258e8206f560e5b5b18cbeafef87e","0x00000000000000000000000000000000002a910445cd8fc895e5d235cd8ea185","0x000000000000000000000000000000887e67f15e84bcb8507a5064a363f6043b","0x000000000000000000000000000000000014dc6643d801c3ef27c2066b6e2bb4","0x000000000000000000000000000000e38e900b42c314ba803088e8fbf125203f","0x000000000000000000000000000000000020690fd4869db418306046b38161dc","0x0000000000000000000000000000001e2fa856bf7951b8292b1e88185993629c","0x0000000000000000000000000000000000048a85e0bbac7c60ad3d78f601f63c","0x0000000000000000000000000000006f457719495073d3666d77a625aeab0c51","0x00000000000000000000000000000000002623ad892dc62b1fa7d0a650f0d470","0x000000000000000000000000000000dbfcc8a467e021c03b13f74a9f79c3a10c","0x0000000000000000000000000000000000295f6f10976c37bd9c6f96bb7187d
5","0x000000000000000000000000000000c13ef9a937cc12420fb38d9ab8e848e85e","0x000000000000000000000000000000000003560a3b334e887532f605c9cb7628","0x0000000000000000000000000000009bcebf08a4599cdda0fb96312d4dc0c7a9","0x000000000000000000000000000000000015adc8bb1e01c835f48959d1237bd6","0x00000000000000000000000000000047762ab839e4ff63c77605a9f383da37c2","0x000000000000000000000000000000000016a8c3c53d89660cf271522cd301fb","0x000000000000000000000000000000f0c8539a0b5f94420a513f9c305b932bfe","0x00000000000000000000000000000000002957ba01d9de5638f808f88a692533","0x000000000000000000000000000000ab17c6189d67d3bf5dd2f3885de0151b6f","0x0000000000000000000000000000000000060d8aa43fdc434d1942263f364d95","0x0000000000000000000000000000005d292333b3adb497f00b4bc32d45229060","0x00000000000000000000000000000000001a1018a66221883639f2898a66f345","0x00000000000000000000000000000006555a806b1993291deba0dc44e2abf431","0x00000000000000000000000000000000000cacff7099a9d5e35a21f4a00b2dc3","0x000000000000000000000000000000f50c11ba95d349c36d143eefd12e494950","0x00000000000000000000000000000000001022e8c5f02d639bc9dd8bc4407f99","0x000000000000000000000000000000c76828795098eda73d50b4b585c60afc60","0x00000000000000000000000000000000002bf09c0ec7011e93888962f2406630","0x00000000000000000000000000000049e5c83a8978d832fb8e144548e3ca1adb","0x00000000000000000000000000000000000e0ec242c2e160a984f61ca5adf5f5","0x0000000000000000000000000000009c5d6e08a6605ab4513748ac0fa017dd1c","0x00000000000000000000000000000000001f54baa07558e5fb055bd9ba49c067","0x0000000000000000000000000000001e1ee7ee29bbb5e4b080c6091c1433ce62","0x000000000000000000000000000000000024aec62a9d9763499267dc98c33428","0x0000000000000000000000000000001a96755946ff16f0d6632365f0eb0ab4d4","0x000000000000000000000000000000000028cf3e22bcd53782ebc3e0490e27e5","0x00000000000000000000000000000043148d7d8c9ba43f2133fab4201435a364","0x0000000000000000000000000000000000234ce541f1f5117dd404cfaf01a229","0x000000000000000000000000000000a7fb95ffb461d9514a1070e2d2403982
ef","0x00000000000000000000000000000000003016955028b6390f446c3fd0c5b424","0x00000000000000000000000000000008863c3b7cd7cddc20ba79ce915051c56e","0x000000000000000000000000000000000013ef666111b0be56a235983d397d2a","0x000000000000000000000000000000e3993f465fc9f56e93ac769e597b752c1c","0x0000000000000000000000000000000000217f7c4235161e9a3c16c45b6ca499","0x0000000000000000000000000000008ffa4cd96bc67b0b7df5678271e1114075","0x0000000000000000000000000000000000256467bfcb63d9fdcb5dde397757ad","0x00000000000000000000000000000054e5eb270bb64bde6e6ececadfd8c3236c","0x00000000000000000000000000000000000e52d1bd75812c33c6f3d79ee4b94c","0x000000000000000000000000000000484a2c641dce55bc2dd64ef0cd790a7fea","0x00000000000000000000000000000000000ff417d256be43e73c8b1aa85bdda3","0x0000000000000000000000000000000b72e7b7713ab5da44e0f864182e748a23","0x00000000000000000000000000000000001a221055f1625ad833a44705f5f74e","0x00000000000000000000000000000067a99a34e9b81a17ad001db02e29bcb82a","0x000000000000000000000000000000000018a6c02e398389827568fa960e86e2","0x000000000000000000000000000000bb29f26f9890d6cc6401f4921d5884edca","0x00000000000000000000000000000000000868357b28039385c5a5058b6d358e","0x00000000000000000000000000000036fb6e229dde8edf7ec858b12d7e8be485","0x00000000000000000000000000000000001060afe929554ca473103f5e68193c","0x00000000000000000000000000000015226e07e207744c0857074dcab883af4a","0x00000000000000000000000000000000000b1c02619282755533457230b19b4a","0x0000000000000000000000000000001f2a0277e4807e6e1cbabca21dde5eb5e1","0x00000000000000000000000000000000000d928deafed363659688ed4ccdef52","0x000000000000000000000000000000363f0c994e91cecad25835338edee2294f","0x00000000000000000000000000000000002eea648c8732596b1314fe2a4d2f05","0x000000000000000000000000000000b2671d2ae51d31c1210433c3972bb64578","0x00000000000000000000000000000000000ab49886c2b94bd0bd3f6ed1dbbe2c"] -proof_b = 
["0x0000000000000000000000000000000000000000000000000000000000000003","0x000000000000000000000000000000f05c69448ca29bdf52076f9b073bb30fed","0x000000000000000000000000000000000028c86bb3e27b4aaaaef126f7df5349","0x00000000000000000000000000000026ae031fc93594375dfc7f3bbe027f97d5","0x000000000000000000000000000000000000dd12c7290fe7f775796a233b8590","0x000000000000000000000000000000c1ee6631704de424d010c5c4ac8293ac49","0x00000000000000000000000000000000002f41818c9aa83f5c8d9bdd128015b9","0x000000000000000000000000000000b50a5801482f7e3a5de8ab3cce0f10b0d3","0x000000000000000000000000000000000022a0bc69c293dbf293b25bc9eef7f8","0x0000000000000000000000000000003b02abf1967ef394154dc15d763135e903","0x00000000000000000000000000000000000d8a2ee46acc6d1ed8d517b56d47c8","0x00000000000000000000000000000039bf0d1b3d8cf9de898f101c626e978d78","0x0000000000000000000000000000000000008faa7df2451a24d291a9b584f1a5","0x000000000000000000000000000000c1dae329ed7adf63a2d89a5f16fb98b6d8","0x00000000000000000000000000000000001ff0bc16fc0bd4aa2d6255690453c2","0x000000000000000000000000000000d12d7589f853a9b472613efa56689beaf1","0x00000000000000000000000000000000002d6fbc798f4403751df6aeee8bedd3","0x0000000000000000000000000000007c1fa069cb17194fecf88db9dd54a4ee36","0x0000000000000000000000000000000000268e026f9814822a42b2d59eec5d24","0x000000000000000000000000000000c3fb56beab774218cd63498fc050a5fd9b","0x00000000000000000000000000000000000071c014d7b5063f005a0bc2ee1af4","0x000000000000000000000000000000ae12b25371c6af42bbe0a85cddd2eaebc7","0x000000000000000000000000000000000026d270e1ffc9c7c344c694dfadda83","0x00000000000000000000000000000080280858c6be461716921caa3c26f3f6f3","0x000000000000000000000000000000000001dcdd3f39e27d0ce6aa5d14dff4c1","0x000000000000000000000000000000080e1d2c913c834ebcf7e0600c076c08fd","0x00000000000000000000000000000000002df3d142217694e65fb7c355d62764","0x000000000000000000000000000000e5e336f3f59d77e500f49771bfbeb12e83","0x000000000000000000000000000000000028fffe08bdc4c0690643d2e1a1275f
","0x000000000000000000000000000000db5618b32afc13e18f21b39f3fbede9d11","0x00000000000000000000000000000000001d244818370d43fb7e8bc67e03787b","0x0000000000000000000000000000006bcc1fd3f9f78449ad1df1bc11bc379edd","0x000000000000000000000000000000000009ac9cbb285edbf5b3a973f3f5f1cb","0x000000000000000000000000000000fd885905b6c0fc95bb4dd0b11f6797d4b3","0x000000000000000000000000000000000021f07995cdd835145e19c38127c562","0x000000000000000000000000000000bbbf2b975c2c97ae4b45c4a52059e53ee3","0x000000000000000000000000000000000024158163788841cf4590bbc1e89a90","0x0000000000000000000000000000009aca93d2b1386ea412d4b36ea5bb9894a8","0x00000000000000000000000000000000002532d1d210e8ed4c2f5c00cbaaa475","0x000000000000000000000000000000634a88caa1d77cb6b5fe77cac31458fc31","0x00000000000000000000000000000000000bdf18bae92fce7cfddab5520cac6e","0x000000000000000000000000000000622e9626255170ccec77602c755aa193e1","0x000000000000000000000000000000000001d4edba370e04436a988bad05dada","0x000000000000000000000000000000b52934323a0aec8f803cdaafee2ab7bfb2","0x0000000000000000000000000000000000155312af5e0e25ca9fd61aef9e58ed","0x06270b517855f6f6a608e432883d1d1030a12a1e33022dc142b7728691421da2","0x2af7c794d7b720b25eb1df0afd8c8e3c15b6e518194c3caea7966a5f8210ff04","0x073fe573aeb27d81a5713be93e1365390dcbc3c8e7439ff1d36a84cc014f5642","0x11351b961147431e54535248b58b35cf5cddb9b13827899167617d7a96794d64","0x297c9421c9c3db286770787c35b86bc41583386491b4ae55e5fa81aefa21efc4","0x0f4eeca3ff4a3495f859898937688652d33f9b4dd3e003e12adf15278e0997c3","0x133e3d8b82721d40d919f2326810ba6f07eff3f7d20d86b2bde692a811522019","0x2c502f53c9698b73bb8c8f9b9cf2d705d16a64a7040348b4b39c637a2064316c","0x0cbc1971e1c566cde9d9125c91cdc88e817db182692f836c1a5170a6246eaf73","0x12c47793e7db706c637cd4b4d96d227f569850176b852b1fe8ad522ddb38ef0e","0x0cd7b300e9309a135285be1aeb02b152f97931a7357ab6d609a2cb1970aab877","0x2a7789dfe286c9d0a7592f1c9316e730cb14c9d843aefc4764d76e7f8571c96a","0x248ac54ce3dbf37796621882a4ac76046df5ab680da487fd85cce76b1ae392d
3","0x149d1d07cebe320f77b03533e34912545cedeae62bd9778d37724728762b5710","0x00fe29daebdaed61309790e70e2dcefa3f3af4c6c965ce424b8dbcf09b8e4b49","0x2b75b3bace61b731d7f0c003a144b62b0a4fbe9f0d14ca89b0652b70210014b3","0x2588ef27cfb6e0d8c6f9a969b2da44fead30a02ed70a563fd15aa45bb671de1c","0x2b74d7674b55642697b4a1e226eddb0e4918b2d57aa5b99093dc46cadcdea000","0x244c626845d3a5040f08f01e9611f968ad675ca857789149b13a0cfa83a2e064","0x2cb8d02f90cae33fd7bcfb80af4aff067c4f5fc4b3f9228d5b8f768bc8f6c971","0x1372f3d1f04e0c39a50e823d5da03d70bebe19a1b8e28f8c2ff601cc0bfc0095","0x19af6601d2613426a50b7c35d60562a5f2f2634e6af56dac13459632e15570ee","0x13c2a16ed3b65dcd9414659be79af17995d344de34eaf962343b0f1e76c73a57","0x0dd5dcdbd50b8774831d4f01f930804d38b4266dfee085185530880a0c3903c0","0x07e91848d660b11b722638680ac60f20db9507fdc8d610ce762600f5a1aacd29","0x1f9c2a94d10c0a7fb60292cfc46fd3d2501181bea0ffe1f5f2501d474be3a785","0x14edb9c5bd389eae08a5ea2a7a1662894e1e878c142084d966a625bef68cf7c3","0x00000000000000000000000000000000cecd01810814d175f0a533f0067618c4","0x00000000000000000000000000000000f82935013ce5c82720c63e533af41db8","0x000000000000000000000000000000012185688171b6bed850e748b66f7222ac","0x2dd7f5ff2150155c2ac86ebe28d9ecbca2eea812b0021ab2bceae111cfea8325","0x04ea6c2daf2b9e827d2213c3d03953410dcf1ed67ba34a3c00e772be92606a8b","0x163f2bd18dcde52f99b9867c944780fd718d1612927053b139b280fc55013d1b","0x05e388fd160ccac30a8f7b18a4bd042f705e92b5937e8c0e9478e2ff623907c6","0x00ba3f6f527d6ed3ff17a63b1d5be3c42bdfae88fdf63311fc7b871157939309","0x16187d9daa8c2e5a1a9ab15be7ca6a8feebfb31bea76f9a3ca69381881c70561","0x0f64522e4904edb7377b14a7b9dad848829167324ef5c016346b3ad8251191ee","0x273bbe6000a4001dce369e5a36cc0b0ca3fd351665b688238aa8c556a6ca6b8e","0x022d2232efb2faa8307846c9a4c697aabad1b7f1336b35ad72fa8922975b49d9","0x0d82d478bff3955c4b0a34ef94427ca5f9da23147ad953c89f2e428277ec2825","0x18d886be90343010659c231583be61a138e28e37c24771e3cb61fbe2587d0671","0x000000000000000000000000000000196ba6a58dbeb7c34cb1d6287e23d434
de","0x00000000000000000000000000000000001df8ae8a1589590f8863c1fefd8dfd","0x000000000000000000000000000000f30e11b2c5fbefa166cbb9f58c5f8e1a4c","0x000000000000000000000000000000000026420ade7666bc0ab1cf1fd9d0c534","0x0000000000000000000000000000000feb5b7d8260d25a1ee1ce76ff461673fc","0x00000000000000000000000000000000002bd2ac6223a80671b777bf5dca70a4","0x000000000000000000000000000000690f757006d2fa1ddb0114c9f268783537","0x000000000000000000000000000000000023ad36feadd91e50118f32e97a0204"] \ No newline at end of file +proof_b = ["0x000000000000000000000000000000f05c69448ca29bdf52076f9b073bb30fed","0x000000000000000000000000000000000028c86bb3e27b4aaaaef126f7df5349","0x00000000000000000000000000000026ae031fc93594375dfc7f3bbe027f97d5","0x000000000000000000000000000000000000dd12c7290fe7f775796a233b8590","0x000000000000000000000000000000c1ee6631704de424d010c5c4ac8293ac49","0x00000000000000000000000000000000002f41818c9aa83f5c8d9bdd128015b9","0x000000000000000000000000000000b50a5801482f7e3a5de8ab3cce0f10b0d3","0x000000000000000000000000000000000022a0bc69c293dbf293b25bc9eef7f8","0x0000000000000000000000000000003b02abf1967ef394154dc15d763135e903","0x00000000000000000000000000000000000d8a2ee46acc6d1ed8d517b56d47c8","0x00000000000000000000000000000039bf0d1b3d8cf9de898f101c626e978d78","0x0000000000000000000000000000000000008faa7df2451a24d291a9b584f1a5","0x000000000000000000000000000000c1dae329ed7adf63a2d89a5f16fb98b6d8","0x00000000000000000000000000000000001ff0bc16fc0bd4aa2d6255690453c2","0x000000000000000000000000000000d12d7589f853a9b472613efa56689beaf1","0x00000000000000000000000000000000002d6fbc798f4403751df6aeee8bedd3","0x0000000000000000000000000000007c1fa069cb17194fecf88db9dd54a4ee36","0x0000000000000000000000000000000000268e026f9814822a42b2d59eec5d24","0x000000000000000000000000000000c3fb56beab774218cd63498fc050a5fd9b","0x00000000000000000000000000000000000071c014d7b5063f005a0bc2ee1af4","0x000000000000000000000000000000ae12b25371c6af42bbe0a85cddd2eaebc7","0x00000000000000000000
0000000000000026d270e1ffc9c7c344c694dfadda83","0x00000000000000000000000000000080280858c6be461716921caa3c26f3f6f3","0x000000000000000000000000000000000001dcdd3f39e27d0ce6aa5d14dff4c1","0x000000000000000000000000000000080e1d2c913c834ebcf7e0600c076c08fd","0x00000000000000000000000000000000002df3d142217694e65fb7c355d62764","0x000000000000000000000000000000e5e336f3f59d77e500f49771bfbeb12e83","0x000000000000000000000000000000000028fffe08bdc4c0690643d2e1a1275f","0x000000000000000000000000000000db5618b32afc13e18f21b39f3fbede9d11","0x00000000000000000000000000000000001d244818370d43fb7e8bc67e03787b","0x0000000000000000000000000000006bcc1fd3f9f78449ad1df1bc11bc379edd","0x000000000000000000000000000000000009ac9cbb285edbf5b3a973f3f5f1cb","0x000000000000000000000000000000fd885905b6c0fc95bb4dd0b11f6797d4b3","0x000000000000000000000000000000000021f07995cdd835145e19c38127c562","0x000000000000000000000000000000bbbf2b975c2c97ae4b45c4a52059e53ee3","0x000000000000000000000000000000000024158163788841cf4590bbc1e89a90","0x0000000000000000000000000000009aca93d2b1386ea412d4b36ea5bb9894a8","0x00000000000000000000000000000000002532d1d210e8ed4c2f5c00cbaaa475","0x000000000000000000000000000000634a88caa1d77cb6b5fe77cac31458fc31","0x00000000000000000000000000000000000bdf18bae92fce7cfddab5520cac6e","0x000000000000000000000000000000622e9626255170ccec77602c755aa193e1","0x000000000000000000000000000000000001d4edba370e04436a988bad05dada","0x000000000000000000000000000000b52934323a0aec8f803cdaafee2ab7bfb2","0x0000000000000000000000000000000000155312af5e0e25ca9fd61aef9e58ed","0x06270b517855f6f6a608e432883d1d1030a12a1e33022dc142b7728691421da2","0x2af7c794d7b720b25eb1df0afd8c8e3c15b6e518194c3caea7966a5f8210ff04","0x073fe573aeb27d81a5713be93e1365390dcbc3c8e7439ff1d36a84cc014f5642","0x11351b961147431e54535248b58b35cf5cddb9b13827899167617d7a96794d64","0x297c9421c9c3db286770787c35b86bc41583386491b4ae55e5fa81aefa21efc4","0x0f4eeca3ff4a3495f859898937688652d33f9b4dd3e003e12adf15278e0997c3","0x133e3d8b82721d40d91
9f2326810ba6f07eff3f7d20d86b2bde692a811522019","0x2c502f53c9698b73bb8c8f9b9cf2d705d16a64a7040348b4b39c637a2064316c","0x0cbc1971e1c566cde9d9125c91cdc88e817db182692f836c1a5170a6246eaf73","0x12c47793e7db706c637cd4b4d96d227f569850176b852b1fe8ad522ddb38ef0e","0x0cd7b300e9309a135285be1aeb02b152f97931a7357ab6d609a2cb1970aab877","0x2a7789dfe286c9d0a7592f1c9316e730cb14c9d843aefc4764d76e7f8571c96a","0x248ac54ce3dbf37796621882a4ac76046df5ab680da487fd85cce76b1ae392d3","0x149d1d07cebe320f77b03533e34912545cedeae62bd9778d37724728762b5710","0x00fe29daebdaed61309790e70e2dcefa3f3af4c6c965ce424b8dbcf09b8e4b49","0x2b75b3bace61b731d7f0c003a144b62b0a4fbe9f0d14ca89b0652b70210014b3","0x2588ef27cfb6e0d8c6f9a969b2da44fead30a02ed70a563fd15aa45bb671de1c","0x2b74d7674b55642697b4a1e226eddb0e4918b2d57aa5b99093dc46cadcdea000","0x244c626845d3a5040f08f01e9611f968ad675ca857789149b13a0cfa83a2e064","0x2cb8d02f90cae33fd7bcfb80af4aff067c4f5fc4b3f9228d5b8f768bc8f6c971","0x1372f3d1f04e0c39a50e823d5da03d70bebe19a1b8e28f8c2ff601cc0bfc0095","0x19af6601d2613426a50b7c35d60562a5f2f2634e6af56dac13459632e15570ee","0x13c2a16ed3b65dcd9414659be79af17995d344de34eaf962343b0f1e76c73a57","0x0dd5dcdbd50b8774831d4f01f930804d38b4266dfee085185530880a0c3903c0","0x07e91848d660b11b722638680ac60f20db9507fdc8d610ce762600f5a1aacd29","0x1f9c2a94d10c0a7fb60292cfc46fd3d2501181bea0ffe1f5f2501d474be3a785","0x14edb9c5bd389eae08a5ea2a7a1662894e1e878c142084d966a625bef68cf7c3","0x00000000000000000000000000000000cecd01810814d175f0a533f0067618c4","0x00000000000000000000000000000000f82935013ce5c82720c63e533af41db8","0x000000000000000000000000000000012185688171b6bed850e748b66f7222ac","0x2dd7f5ff2150155c2ac86ebe28d9ecbca2eea812b0021ab2bceae111cfea8325","0x04ea6c2daf2b9e827d2213c3d03953410dcf1ed67ba34a3c00e772be92606a8b","0x163f2bd18dcde52f99b9867c944780fd718d1612927053b139b280fc55013d1b","0x05e388fd160ccac30a8f7b18a4bd042f705e92b5937e8c0e9478e2ff623907c6","0x00ba3f6f527d6ed3ff17a63b1d5be3c42bdfae88fdf63311fc7b871157939309","0x16187d9daa8c2e5a1a
9ab15be7ca6a8feebfb31bea76f9a3ca69381881c70561","0x0f64522e4904edb7377b14a7b9dad848829167324ef5c016346b3ad8251191ee","0x273bbe6000a4001dce369e5a36cc0b0ca3fd351665b688238aa8c556a6ca6b8e","0x022d2232efb2faa8307846c9a4c697aabad1b7f1336b35ad72fa8922975b49d9","0x0d82d478bff3955c4b0a34ef94427ca5f9da23147ad953c89f2e428277ec2825","0x18d886be90343010659c231583be61a138e28e37c24771e3cb61fbe2587d0671","0x000000000000000000000000000000196ba6a58dbeb7c34cb1d6287e23d434de","0x00000000000000000000000000000000001df8ae8a1589590f8863c1fefd8dfd","0x000000000000000000000000000000f30e11b2c5fbefa166cbb9f58c5f8e1a4c","0x000000000000000000000000000000000026420ade7666bc0ab1cf1fd9d0c534","0x0000000000000000000000000000000feb5b7d8260d25a1ee1ce76ff461673fc","0x00000000000000000000000000000000002bd2ac6223a80671b777bf5dca70a4","0x000000000000000000000000000000690f757006d2fa1ddb0114c9f268783537","0x000000000000000000000000000000000023ad36feadd91e50118f32e97a0204"] \ No newline at end of file diff --git a/test_programs/execution_success/double_verify_proof/src/main.nr b/test_programs/execution_success/double_verify_proof/src/main.nr index 98cd534266a..ce087dc4e61 100644 --- a/test_programs/execution_success/double_verify_proof/src/main.nr +++ b/test_programs/execution_success/double_verify_proof/src/main.nr @@ -2,31 +2,27 @@ use dep::std; fn main( verification_key: [Field; 114], - proof: [Field; 94], + // This is the proof without public inputs attached. + // + // This means: the size of this does not change with the number of public inputs. + proof: [Field; 93], public_inputs: [Field; 1], + // This is currently not public. It is fine given that the vk is a part of the circuit definition. + // I believe we want to eventually make it public too though. 
key_hash: Field, - input_aggregation_object: [Field; 16], - proof_b: [Field; 94] -) -> pub [Field; 16] { - let output_aggregation_object_a = std::verify_proof( + proof_b: [Field; 93] +) { + std::verify_proof( verification_key.as_slice(), proof.as_slice(), public_inputs.as_slice(), - key_hash, - input_aggregation_object + key_hash ); - let output_aggregation_object = std::verify_proof( + std::verify_proof( verification_key.as_slice(), proof_b.as_slice(), public_inputs.as_slice(), - key_hash, - output_aggregation_object_a + key_hash ); - - let mut output = [0; 16]; - for i in 0..16 { - output[i] = output_aggregation_object[i]; - } - output } diff --git a/test_programs/execution_success/global_consts/src/main.nr b/test_programs/execution_success/global_consts/src/main.nr index a749ec77da6..70c7a745a22 100644 --- a/test_programs/execution_success/global_consts/src/main.nr +++ b/test_programs/execution_success/global_consts/src/main.nr @@ -55,17 +55,17 @@ fn main( let t: [Field; T_LEN] = [N, M]; assert(t[1] == 32); - assert(15 == mysubmodule::my_helper()); + assert(15 == my_submodule::my_helper()); - let add_submodules_N = mysubmodule::N + foo::bar::N; + let add_submodules_N = my_submodule::N + foo::bar::N; assert(15 == add_submodules_N); - let add_from_bar_N = mysubmodule::N + foo::bar::from_bar(1); + let add_from_bar_N = my_submodule::N + foo::bar::from_bar(1); assert(15 == add_from_bar_N); - // Example showing an array filled with (mysubmodule::N + 2) 0's - let sugared = [0; mysubmodule::N + 2]; - assert(sugared[mysubmodule::N + 1] == 0); + // Example showing an array filled with (my_submodule::N + 2) 0's + let sugared = [0; my_submodule::N + 2]; + assert(sugared[my_submodule::N + 1] == 0); - let arr: [Field; mysubmodule::N] = [N; 10]; + let arr: [Field; my_submodule::N] = [N; 10]; assert((arr[0] == 5) & (arr[9] == 5)); foo::from_foo(d); @@ -80,7 +80,7 @@ fn arrays_neq(a: [Field; M], b: [Field; M]) { assert(a != b); } -mod mysubmodule { +mod my_submodule { global N: 
Field = 10; global L: Field = 50; diff --git a/test_programs/execution_success/nested_slice_dynamic/Nargo.toml b/test_programs/execution_success/nested_array_in_slice/Nargo.toml similarity index 62% rename from test_programs/execution_success/nested_slice_dynamic/Nargo.toml rename to test_programs/execution_success/nested_array_in_slice/Nargo.toml index c8925ed97b4..4f0748f79be 100644 --- a/test_programs/execution_success/nested_slice_dynamic/Nargo.toml +++ b/test_programs/execution_success/nested_array_in_slice/Nargo.toml @@ -1,5 +1,5 @@ [package] -name = "nested_slice_dynamic" +name = "nested_array_in_slice" type = "bin" authors = [""] [dependencies] \ No newline at end of file diff --git a/test_programs/execution_success/nested_slice_dynamic/Prover.toml b/test_programs/execution_success/nested_array_in_slice/Prover.toml similarity index 100% rename from test_programs/execution_success/nested_slice_dynamic/Prover.toml rename to test_programs/execution_success/nested_array_in_slice/Prover.toml diff --git a/test_programs/execution_success/nested_slice_dynamic/src/main.nr b/test_programs/execution_success/nested_array_in_slice/src/main.nr similarity index 100% rename from test_programs/execution_success/nested_slice_dynamic/src/main.nr rename to test_programs/execution_success/nested_array_in_slice/src/main.nr diff --git a/test_programs/execution_success/operator_overloading/Nargo.toml b/test_programs/execution_success/operator_overloading/Nargo.toml new file mode 100644 index 00000000000..7f9f18ff567 --- /dev/null +++ b/test_programs/execution_success/operator_overloading/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "operator_overloading" +type = "bin" +authors = [""] +compiler_version = ">=0.20.0" + +[dependencies] diff --git a/test_programs/execution_success/operator_overloading/Prover.toml b/test_programs/execution_success/operator_overloading/Prover.toml new file mode 100644 index 00000000000..516b7b4074c --- /dev/null +++ 
b/test_programs/execution_success/operator_overloading/Prover.toml @@ -0,0 +1,2 @@ +x = 3 +y = 9 diff --git a/test_programs/execution_success/operator_overloading/src/main.nr b/test_programs/execution_success/operator_overloading/src/main.nr new file mode 100644 index 00000000000..3867531abca --- /dev/null +++ b/test_programs/execution_success/operator_overloading/src/main.nr @@ -0,0 +1,154 @@ +use dep::std::ops::{ Add, Sub, Mul, Div, Rem, BitAnd, BitOr, BitXor, Shl, Shr }; +use dep::std::cmp::Ordering; + +// x = 3, y = 9 +fn main(x: u32, y: u32) { + let wx = Wrapper::new(x); + let wy = Wrapper::new(y); + + // expected x and expected y values + let ex: u32 = 3; + let ey: u32 = 9; + + assert((wx + wy).inner == ex + ey); + assert((wy - wx).inner == ey - ex); + assert((wx * wy).inner == ex * ey); + assert((wx / wy).inner == ex / ey); + assert((wx % wy).inner == ex % ey); + + assert((wx & wy).inner == (ex & ey)); + assert((wx | wy).inner == (ex | ey)); + assert((wx ^ wy).inner == (ex ^ ey)); + + assert((wy << wx).inner == (ey << ex)); + assert((wy >> wx).inner == (ey >> ex)); + + assert((wx == wy) == (ex == ey)); + assert((wx < wy) == (ex < ey)); + assert((wx <= wy) == (ex <= ey)); + assert((wx > wy) == (ex > ey)); + assert((wx >= wy) == (ex >= ey)); + assert(wx.cmp(wy) == ex.cmp(ey)); + + // Ensure operator overloading still works with more complex types + let pair_ascending = Pair { x: wx, y: wy }; + let pair_descending = Pair { x: wy, y: wx }; + + assert(pair_ascending != pair_descending); + + assert(pair_ascending < pair_descending); + assert(pair_ascending <= pair_descending); + assert(pair_descending > pair_ascending); + assert(pair_descending >= pair_ascending); + + assert(pair_ascending.cmp(pair_descending) == Ordering::less()); +} + +struct Wrapper { + inner: u32 +} + +impl Wrapper { + fn new(inner: u32) -> Self { + Wrapper { inner } + } +} + +impl Add for Wrapper { + fn add(self, other: Self) -> Self { + Wrapper::new(self.inner + other.inner) + } +} + +impl 
Sub for Wrapper { + fn sub(self, other: Self) -> Self { + Wrapper::new(self.inner - other.inner) + } +} + +impl Mul for Wrapper { + fn mul(self, other: Self) -> Self { + Wrapper::new(self.inner * other.inner) + } +} + +impl Div for Wrapper { + fn div(self, other: Self) -> Self { + Wrapper::new(self.inner / other.inner) + } +} + +impl Rem for Wrapper { + fn rem(self, other: Self) -> Self { + Wrapper::new(self.inner % other.inner) + } +} + +impl BitAnd for Wrapper { + fn bitand(self, other: Self) -> Self { + Wrapper::new(self.inner & other.inner) + } +} + +impl BitOr for Wrapper { + fn bitor(self, other: Self) -> Self { + Wrapper::new(self.inner | other.inner) + } +} + +impl BitXor for Wrapper { + fn bitxor(self, other: Self) -> Self { + Wrapper::new(self.inner ^ other.inner) + } +} + +impl Shl for Wrapper { + fn shl(self, other: Self) -> Self { + Wrapper::new(self.inner << other.inner) + } +} + +impl Shr for Wrapper { + fn shr(self, other: Self) -> Self { + Wrapper::new(self.inner >> other.inner) + } +} + +impl Eq for Wrapper { + fn eq(self, other: Self) -> bool { + self.inner == other.inner + } +} + +impl Ord for Wrapper { + fn cmp(self, other: Self) -> Ordering { + self.inner.cmp(other.inner) + } +} + + + + + +struct Pair { + x: Wrapper, + y: Wrapper, +} + +impl Eq for Pair { + fn eq(self, o: Self) -> bool { + (self.x == o.x) & (self.y == o.y) + } +} + +impl Ord for Pair { + fn cmp(self, o: Self) -> Ordering { + let mut result = self.x.cmp(o.x); + + if result == Ordering::equal() { + result = self.y.cmp(o.y); + } + + result + } +} diff --git a/tooling/nargo_cli/tests/execution_success/prelude/Nargo.toml b/test_programs/execution_success/prelude/Nargo.toml similarity index 100% rename from tooling/nargo_cli/tests/execution_success/prelude/Nargo.toml rename to test_programs/execution_success/prelude/Nargo.toml diff --git a/tooling/nargo_cli/tests/execution_success/prelude/src/main.nr b/test_programs/execution_success/prelude/src/main.nr similarity index 59% rename 
from tooling/nargo_cli/tests/execution_success/prelude/src/main.nr rename to test_programs/execution_success/prelude/src/main.nr index 9bf2ec18f3a..c9ae448c486 100644 --- a/tooling/nargo_cli/tests/execution_success/prelude/src/main.nr +++ b/test_programs/execution_success/prelude/src/main.nr @@ -1,6 +1,6 @@ -fn main(x: Field, y: pub Field) { - let xs = Vec::new(); - let option = Option::none(); +fn main() { + let _xs = Vec::new(); + let _option = Option::none(); print("42\n"); println("42"); @@ -10,11 +10,10 @@ mod a { // We don't want to give an error due to re-importing elements that are already in the prelude. use dep::std::collections::vec::Vec; use dep::std::option::Option; - use dep::{print, println}; fn main() { - let xs = Vec::new(); - let option = Option::none(); + let _xs = Vec::new(); + let _option = Option::none(); print("42\n"); println("42"); @@ -23,8 +22,8 @@ mod a { mod b { fn main() { - let xs = Vec::new(); - let option = Option::none(); + let _xs = Vec::new(); + let _option = Option::none(); print("42\n"); println("42"); diff --git a/test_programs/execution_success/regression_3889/Nargo.toml b/test_programs/execution_success/regression_3889/Nargo.toml new file mode 100644 index 00000000000..d212d24473f --- /dev/null +++ b/test_programs/execution_success/regression_3889/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "regression_3889" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/regression_3889/Prover.toml b/test_programs/execution_success/regression_3889/Prover.toml new file mode 100644 index 00000000000..a81ab67fe3e --- /dev/null +++ b/test_programs/execution_success/regression_3889/Prover.toml @@ -0,0 +1,10 @@ +[works] +a = "5" + +[fails] +a = "6" + + +[also_fails] +a = "7" + diff --git a/test_programs/execution_success/regression_3889/src/main.nr b/test_programs/execution_success/regression_3889/src/main.nr new file mode 100644 index 00000000000..10b8ecabee3 --- /dev/null +++ 
b/test_programs/execution_success/regression_3889/src/main.nr @@ -0,0 +1,23 @@ +mod Foo { + struct NewType{ + a: Field, + } +} + +mod Bar { + use crate::Foo::NewType as BarStruct; + use crate::Foo::NewType; +} + +mod Baz { + struct Works { + a: Field, + } + use crate::Bar::BarStruct; + use crate::Bar::NewType; +} + + +fn main(works: Baz::Works, fails: Baz::BarStruct, also_fails: Bar::NewType) -> pub Field { + works.a + fails.a + also_fails.a +} diff --git a/test_programs/execution_success/signed_comparison/Nargo.toml b/test_programs/execution_success/signed_comparison/Nargo.toml new file mode 100644 index 00000000000..c8de162877b --- /dev/null +++ b/test_programs/execution_success/signed_comparison/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "signed_comparison" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/signed_comparison/Prover.toml b/test_programs/execution_success/signed_comparison/Prover.toml new file mode 100644 index 00000000000..e0e584b7380 --- /dev/null +++ b/test_programs/execution_success/signed_comparison/Prover.toml @@ -0,0 +1,3 @@ +x = "5" +y = "8" +z = "-15" diff --git a/test_programs/execution_success/signed_comparison/src/main.nr b/test_programs/execution_success/signed_comparison/src/main.nr new file mode 100644 index 00000000000..d020be380fb --- /dev/null +++ b/test_programs/execution_success/signed_comparison/src/main.nr @@ -0,0 +1,13 @@ +use dep::std; + +fn main(mut x: i8, mut y: i8, z: i8) { + let mut s1: i8 = 5; + let mut s2: i8 = 8; + assert(-1 as i8 < 0); + assert(x < y); + assert(-x < y); + assert(-y < -x); + assert((z > x) == false); + assert(x <= s1); + assert(z < x - y - s2); +} diff --git a/test_programs/execution_success/slice_struct_field/Prover.toml b/test_programs/execution_success/slice_struct_field/Prover.toml deleted file mode 100644 index 7127baac5bf..00000000000 --- a/test_programs/execution_success/slice_struct_field/Prover.toml +++ /dev/null @@ -1 +0,0 @@ -y = "3" diff --git 
a/test_programs/execution_success/slice_struct_field/src/main.nr b/test_programs/execution_success/slice_struct_field/src/main.nr deleted file mode 100644 index a5b971ada4b..00000000000 --- a/test_programs/execution_success/slice_struct_field/src/main.nr +++ /dev/null @@ -1,472 +0,0 @@ -struct FooParent { - parent_arr: [Field; 3], - foos: [Foo], -} - -struct Bar { - inner: [Field; 3], -} - -struct Foo { - a: Field, - b: [Field], - bar: Bar, -} - -fn main(y: pub Field) { - let mut b_one = [2, 3, 20]; - b_one = b_one.push_back(20); - let foo_one = Foo { a: 1, b: b_one, bar: Bar { inner: [100, 101, 102] } }; - - let mut b_two = [5, 6, 21]; - b_two = b_two.push_back(21); - let foo_two = Foo { a: 4, b: b_two, bar: Bar { inner: [103, 104, 105] } }; - - let foo_three = Foo { a: 7, b: [8, 9, 22], bar: Bar { inner: [106, 107, 108] } }; - let mut foo_four = Foo { a: 10, b: [11, 12, 23], bar: Bar { inner: [109, 110, 111] } }; - - let mut x = [foo_one, foo_two]; - x = x.push_back(foo_three); - x = x.push_back(foo_four); - - assert(x[y - 3].a == 1); - let struct_slice = x[y - 3].b; - for i in 0..4 { - assert(struct_slice[i] == b_one[i]); - } - - assert(x[y - 2].a == 4); - let struct_slice = x[y - 2].b; - for i in 0..4 { - assert(struct_slice[i] == b_two[i]); - } - - assert(x[y - 1].a == 7); - let struct_slice = x[y - 1].b; - assert(struct_slice[0] == 8); - assert(struct_slice[1] == 9); - assert(struct_slice[2] == 22); - - assert(x[y].a == 10); - let struct_slice = x[y].b; - assert(struct_slice[0] == 11); - assert(struct_slice[1] == 12); - assert(struct_slice[2] == 23); - assert(x[y].bar.inner == [109, 110, 111]); - - assert(x[y - 3].bar.inner == [100, 101, 102]); - assert(x[y - 2].bar.inner == [103, 104, 105]); - assert(x[y - 1].bar.inner == [106, 107, 108]); - assert(x[y].bar.inner == [109, 110, 111]); - // Check that switching the lhs and rhs is still valid - assert([109, 110, 111] == x[y].bar.inner); - - assert(x[y - 3].bar.inner == [100, 101, 102]); - assert(x[y - 
2].bar.inner == [103, 104, 105]); - assert(x[y - 1].bar.inner == [106, 107, 108]); - assert(x[y].bar.inner == [109, 110, 111]); - // Check that switching the lhs and rhs is still valid - assert([109, 110, 111] == x[y].bar.inner); - - // TODO: Enable merging nested slices - // if y != 2 { - // x[y].a = 50; - // } else { - // x[y].a = 100; - // } - // assert(x[3].a == 50); - // if y == 2 { - // x[y - 1].b = [50, 51, 52]; - // } else { - // x[y - 1].b = [100, 101, 102]; - // } - // assert(x[2].b[0] == 100); - // assert(x[2].b[1] == 101); - // assert(x[2].b[2] == 102); - - let q = x.push_back(foo_four); - let foo_parent_one = FooParent { parent_arr: [0, 1, 2], foos: x }; - let foo_parent_two = FooParent { parent_arr: [3, 4, 5], foos: q }; - let mut foo_parents = [foo_parent_one]; - foo_parents = foo_parents.push_back(foo_parent_two); - // TODO: make a separate test for entirely compile time - // foo_parents[1].foos.push_back(foo_four); - // TODO: Merging nested slices is broken - // if y == 3 { - // foo_parents[y - 2].foos[y - 1].b[y - 1] = 5000; - // } else { - // foo_parents[y - 2].foos[y - 1].b[y - 1] = 1000; - // } - - assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 21); - foo_parents[y - 2].foos[y - 2].b[y - 1] = 5000; - assert(foo_parents[y - 2].foos[y - 2].b[y - 1] == 5000); - - let b_array = foo_parents[y - 2].foos[y - 3].b; - assert(foo_parents[y - 2].foos[y - 3].a == 1); - assert(b_array[0] == 2); - assert(b_array[1] == 3); - assert(b_array[2] == 20); - assert(b_array[3] == 20); - - let b_array = foo_parents[y - 2].foos[y - 2].b; - assert(foo_parents[y - 2].foos[y - 2].a == 4); - assert(b_array[0] == 5); - assert(b_array[1] == 6); - assert(b_array[2] == 5000); - assert(b_array[3] == 21); - - assert(foo_parents[y - 2].foos[y - 1].a == 7); - foo_parents[y - 2].foos[y - 1].a = 50; - assert(foo_parents[y - 2].foos[y - 1].a == 50); - - let b_array = foo_parents[y - 2].foos[y - 1].b; - assert(b_array[0] == 8); - assert(b_array[1] == 9); - assert(b_array[2] == 
22); - assert(b_array.len() == 3); - - // // Test setting a nested array with non-dynamic - let x = [5, 6, 5000, 21, 100, 101].as_slice(); - foo_parents[y - 2].foos[y - 1].b = x; - - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(foo_parents[y - 2].foos[y - 1].b[4] == 100); - assert(foo_parents[y - 2].foos[y - 1].b[5] == 101); - - // Need to account for that foo_parents is not modified outside of this function - test_basic_intrinsics_nested_slices(foo_parents, y); - test_complex_intrinsic_nested_slices(foo_parents, y); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); - - let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); - foo_parents[y - 2].foos[y - 1].b = popped_slice; - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(last_elem == 500); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); - - assert(foo_parents[y - 2].foos.len() == 5); - foo_four.a = 40; - foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - assert(foo_parents[y - 2].foos.len() == 6); - assert(foo_parents[y - 2].foos[y + 2].bar.inner == [109, 110, 111]); - - foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - assert(foo_parents[y - 2].foos.len() == 7); - assert(foo_parents[y - 2].foos[6].a == 40); - assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); - assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 111]); - - foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - assert(foo_parents[y - 2].foos.len() == 8); - assert(foo_parents[y - 2].foos[6].a == 40); - assert(foo_parents[y - 2].foos[5].bar.inner == [109, 110, 111]); - assert(foo_parents[y - 2].foos[6].bar.inner == [109, 110, 
111]); - - foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - assert(foo_parents[y - 2].foos.len() == 9); - - foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo_four); - assert(foo_parents[y - 2].foos.len() == 10); - - let b_array = foo_parents[y - 2].foos[y - 1].b; - assert(b_array[0] == 11); - assert(b_array[1] == 5); - assert(b_array[2] == 6); - assert(b_array[3] == 5000); - - let b_array = foo_parents[y - 2].foos[y].b; - assert(foo_parents[y - 2].foos[y].a == 10); - assert(b_array[0] == 11); - assert(b_array[1] == 12); - assert(b_array[2] == 23); - - assert(foo_parents[y - 2].foos[y - 3].bar.inner == [100, 101, 102]); - assert(foo_parents[y - 2].foos[y - 2].bar.inner == [103, 104, 105]); - assert(foo_parents[y - 2].foos[y - 1].bar.inner == [106, 107, 108]); - assert(foo_parents[y - 2].foos[y].bar.inner == [109, 110, 111]); -} - -fn test_basic_intrinsics_nested_slices(mut foo_parents: [FooParent], y: Field) { - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[6] == 500); - - let (popped_slice, last_elem) = foo_parents[y - 2].foos[y - 1].b.pop_back(); - foo_parents[y - 2].foos[y - 1].b = popped_slice; - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(last_elem == 500); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_front(11); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[0] == 11); - - let (first_elem, rest_of_slice) = foo_parents[y - 2].foos[y - 1].b.pop_front(); - foo_parents[y - 2].foos[y - 1].b = rest_of_slice; - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(first_elem == 11); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.insert(2, 20); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 7); - assert(foo_parents[y - 2].foos[y - 1].b[y - 1] == 20); - 
assert(foo_parents[y - 2].foos[y - 1].b[y] == 5000); - assert(foo_parents[y - 2].foos[y - 1].b[6] == 101); - - let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos[y - 1].b.remove(3); - foo_parents[y - 2].foos[y - 1].b = rest_of_slice; - assert(removed_elem == 5000); - assert(foo_parents[y - 2].foos[y - 1].b.len() == 6); - assert(foo_parents[y - 2].foos[y - 1].b[2] == 20); - assert(foo_parents[y - 2].foos[y - 1].b[3] == 21); -} - -// This method test intrinsics on nested slices with complex inputs such as -// pushing a `Foo` struct onto a slice in `FooParents`. -fn test_complex_intrinsic_nested_slices(mut foo_parents: [FooParent], y: Field) { - let mut foo = Foo { a: 13, b: [14, 15, 16], bar: Bar { inner: [109, 110, 111] } }; - assert(foo_parents[y - 2].foos.len() == 5); - foo.a = 40; - foo_parents[y - 2].foos = foo_parents[y - 2].foos.push_back(foo); - assert(foo_parents[1].foos.len() == 6); - assert(foo_parents[1].foos[5].a == 40); - assert(foo_parents[1].foos[5].b[0] == 14); - assert(foo_parents[1].foos[5].b[2] == 16); - assert(foo_parents[1].foos[5].b.len() == 3); - assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); - - foo_parents[y - 2].foos[y - 1].b = foo_parents[y - 2].foos[y - 1].b.push_back(500); - assert(foo_parents[1].foos[2].b.len() == 7); - assert(foo_parents[1].foos[2].b[6] == 500); - assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); - assert(foo_parents[1].foos[5].a == 40); - assert(foo_parents[1].foos[5].b[0] == 14); - assert(foo_parents[1].foos[5].b[2] == 16); - assert(foo_parents[1].foos[5].b.len() == 3); - assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); - - let (popped_slice, last_foo) = foo_parents[y - 2].foos.pop_back(); - foo_parents[y - 2].foos = popped_slice; - assert(foo_parents[y - 2].foos.len() == 5); - assert(last_foo.a == 40); - assert(last_foo.b[0] == 14); - assert(last_foo.b[1] == 15); - assert(last_foo.b[2] == 16); - assert(last_foo.bar.inner == [109, 110, 111]); - - foo_parents[y - 2].foos = 
foo_parents[y - 2].foos.push_front(foo); - assert(foo_parents[1].foos.len() == 6); - assert(foo_parents[1].foos[0].a == 40); - assert(foo_parents[1].foos[0].b[0] == 14); - assert(foo_parents[1].foos[0].b[1] == 15); - assert(foo_parents[1].foos[0].b[2] == 16); - assert(foo_parents[1].foos[5].a == 10); - assert(foo_parents[1].foos[5].b.len() == 3); - assert(foo_parents[1].foos[5].b[0] == 11); - assert(foo_parents[1].foos[5].b[2] == 23); - assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[1].a == 1); - assert(foo_parents[1].foos[1].bar.inner == [100, 101, 102]); - - let (first_foo, rest_of_slice) = foo_parents[y - 2].foos.pop_front(); - - foo_parents[y - 2].foos = rest_of_slice; - assert(first_foo.a == 40); - assert(first_foo.b[0] == 14); - assert(first_foo.b[1] == 15); - assert(first_foo.b[2] == 16); - assert(first_foo.bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[0].a == 1); - assert(foo_parents[1].foos[0].b[0] == 2); - assert(foo_parents[1].foos[0].b[1] == 3); - assert(foo_parents[1].foos[0].b[2] == 20); - assert(foo_parents[1].foos[0].b[3] == 20); - assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); - - test_insert_remove_const_index(foo_parents, y, foo); - - // Check values before insertion - assert(foo_parents[1].foos[1].a == 4); - assert(foo_parents[1].foos[1].b[0] == 5); - assert(foo_parents[1].foos[1].b[1] == 6); - assert(foo_parents[1].foos[1].b[2] == 5000); - assert(foo_parents[1].foos[1].b[3] == 21); - assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); - - assert(foo_parents[1].foos.len() == 5); - assert(foo_parents[1].foos[2].a == 50); - assert(foo_parents[1].foos[2].b[0] == 5); - assert(foo_parents[1].foos[2].b[2] == 5000); - assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); - - assert(foo_parents[1].foos[3].a == 10); - assert(foo_parents[1].foos[3].b[0] == 11); - assert(foo_parents[1].foos[3].b[2] == 23); - assert(foo_parents[1].foos[3].bar.inner == [109, 110, 
111]); - - foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(y - 1, foo); - assert(foo_parents[1].foos.len() == 6); - - // Check values correctly moved after insertion - assert(foo_parents[1].foos[0].a == 1); - assert(foo_parents[1].foos[0].b[0] == 2); - assert(foo_parents[1].foos[0].b[1] == 3); - assert(foo_parents[1].foos[0].b[2] == 20); - assert(foo_parents[1].foos[0].b[3] == 20); - assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); - - assert(foo_parents[1].foos[1].a == 4); - assert(foo_parents[1].foos[1].b[0] == 5); - assert(foo_parents[1].foos[1].b[1] == 6); - assert(foo_parents[1].foos[1].b[2] == 5000); - assert(foo_parents[1].foos[1].b[3] == 21); - assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); - - assert(foo_parents[1].foos[2].a == 40); - assert(foo_parents[1].foos[2].b[0] == 14); - assert(foo_parents[1].foos[2].b[2] == 16); - assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[3].a == 50); - assert(foo_parents[1].foos[3].b[0] == 5); - assert(foo_parents[1].foos[3].b[2] == 5000); - assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); - - assert(foo_parents[1].foos[4].a == 10); - assert(foo_parents[1].foos[4].b[0] == 11); - assert(foo_parents[1].foos[4].b[2] == 23); - assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[5].a == 10); - assert(foo_parents[1].foos[5].b[0] == 11); - assert(foo_parents[1].foos[5].b[2] == 23); - assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); - - let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(y - 1); - foo_parents[1].foos = rest_of_slice; - - // Check that the accurate element was removed - assert(removed_elem.a == 40); - assert(removed_elem.b[0] == 14); - assert(removed_elem.b[2] == 16); - assert(removed_elem.bar.inner == [109, 110, 111]); - - // Check that we have altered our slice accurately following a removal - assert(foo_parents[1].foos[1].a == 4); - 
assert(foo_parents[1].foos[1].b[0] == 5); - assert(foo_parents[1].foos[1].b[1] == 6); - assert(foo_parents[1].foos[1].b[2] == 5000); - assert(foo_parents[1].foos[1].b[3] == 21); - assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); - - assert(foo_parents[1].foos[2].a == 50); - assert(foo_parents[1].foos[2].b[0] == 5); - assert(foo_parents[1].foos[2].b[2] == 5000); - assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); - - assert(foo_parents[1].foos[3].a == 10); - assert(foo_parents[1].foos[3].b[0] == 11); - assert(foo_parents[1].foos[3].b[2] == 23); - assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[4].b[0] == 11); - assert(foo_parents[1].foos[4].b[2] == 23); - assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); -} - -fn test_insert_remove_const_index(mut foo_parents: [FooParent], y: Field, foo: Foo) { - // Check values before insertion - assert(foo_parents[1].foos[1].a == 4); - assert(foo_parents[1].foos[1].b[0] == 5); - assert(foo_parents[1].foos[1].b[1] == 6); - assert(foo_parents[1].foos[1].b[2] == 5000); - assert(foo_parents[1].foos[1].b[3] == 21); - assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); - - assert(foo_parents[1].foos.len() == 5); - assert(foo_parents[1].foos[2].a == 50); - assert(foo_parents[1].foos[2].b[0] == 5); - assert(foo_parents[1].foos[2].b[2] == 5000); - assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); - - assert(foo_parents[1].foos[3].a == 10); - assert(foo_parents[1].foos[3].b[0] == 11); - assert(foo_parents[1].foos[3].b[2] == 23); - assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); - - foo_parents[y - 2].foos = foo_parents[y - 2].foos.insert(2, foo); - assert(foo_parents[1].foos.len() == 6); - - // Check values correctly moved after insertion - assert(foo_parents[1].foos[0].a == 1); - assert(foo_parents[1].foos[0].b[0] == 2); - assert(foo_parents[1].foos[0].b[1] == 3); - assert(foo_parents[1].foos[0].b[2] == 20); - 
assert(foo_parents[1].foos[0].b[3] == 20); - assert(foo_parents[1].foos[0].bar.inner == [100, 101, 102]); - - assert(foo_parents[1].foos[1].a == 4); - assert(foo_parents[1].foos[1].b[0] == 5); - assert(foo_parents[1].foos[1].b[1] == 6); - assert(foo_parents[1].foos[1].b[2] == 5000); - assert(foo_parents[1].foos[1].b[3] == 21); - assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); - - assert(foo_parents[1].foos[2].a == 40); - assert(foo_parents[1].foos[2].b[0] == 14); - assert(foo_parents[1].foos[2].b[2] == 16); - assert(foo_parents[1].foos[2].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[3].a == 50); - assert(foo_parents[1].foos[3].b[0] == 5); - assert(foo_parents[1].foos[3].b[2] == 5000); - assert(foo_parents[1].foos[3].bar.inner == [106, 107, 108]); - - assert(foo_parents[1].foos[4].a == 10); - assert(foo_parents[1].foos[4].b[0] == 11); - assert(foo_parents[1].foos[4].b[2] == 23); - assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[5].a == 10); - assert(foo_parents[1].foos[5].b[0] == 11); - assert(foo_parents[1].foos[5].b[2] == 23); - assert(foo_parents[1].foos[5].bar.inner == [109, 110, 111]); - - let (rest_of_slice, removed_elem) = foo_parents[y - 2].foos.remove(2); - foo_parents[1].foos = rest_of_slice; - - // Check that the accurate element was removed - assert(removed_elem.a == 40); - assert(removed_elem.b[0] == 14); - assert(removed_elem.b[2] == 16); - assert(removed_elem.bar.inner == [109, 110, 111]); - - // Check that we have altered our slice accurately following a removal - assert(foo_parents[1].foos[1].a == 4); - assert(foo_parents[1].foos[1].b[0] == 5); - assert(foo_parents[1].foos[1].b[1] == 6); - assert(foo_parents[1].foos[1].b[2] == 5000); - assert(foo_parents[1].foos[1].b[3] == 21); - assert(foo_parents[1].foos[1].bar.inner == [103, 104, 105]); - - assert(foo_parents[1].foos[2].a == 50); - assert(foo_parents[1].foos[2].b[0] == 5); - assert(foo_parents[1].foos[2].b[2] == 5000); - 
assert(foo_parents[1].foos[2].bar.inner == [106, 107, 108]); - - assert(foo_parents[1].foos[3].a == 10); - assert(foo_parents[1].foos[3].b[0] == 11); - assert(foo_parents[1].foos[3].b[2] == 23); - assert(foo_parents[1].foos[3].bar.inner == [109, 110, 111]); - - assert(foo_parents[1].foos[4].b[0] == 11); - assert(foo_parents[1].foos[4].b[2] == 23); - assert(foo_parents[1].foos[4].bar.inner == [109, 110, 111]); -} diff --git a/test_programs/execution_success/submodules/src/main.nr b/test_programs/execution_success/submodules/src/main.nr index 813f3a26a20..f937af74627 100644 --- a/test_programs/execution_success/submodules/src/main.nr +++ b/test_programs/execution_success/submodules/src/main.nr @@ -1,11 +1,11 @@ -use mysubmodule::my_helper; +use my_submodule::my_helper; fn main(x: u1, y: u1) { my_helper(); - mysubmodule::my_bool_or(x, y); + my_submodule::my_bool_or(x, y); } -mod mysubmodule { +mod my_submodule { pub fn my_bool_or(x: u1, y: u1) { assert(x | y == 1); } diff --git a/tooling/noir_codegen/test/assert_lt/Nargo.toml b/test_programs/execution_success/u128/Nargo.toml similarity index 73% rename from tooling/noir_codegen/test/assert_lt/Nargo.toml rename to test_programs/execution_success/u128/Nargo.toml index f32ec18cae7..c1dcd84db04 100644 --- a/tooling/noir_codegen/test/assert_lt/Nargo.toml +++ b/test_programs/execution_success/u128/Nargo.toml @@ -1,5 +1,6 @@ [package] -name = "assert_lt" +name = "u128" type = "bin" authors = [""] + [dependencies] diff --git a/test_programs/execution_success/u128/Prover.toml b/test_programs/execution_success/u128/Prover.toml new file mode 100644 index 00000000000..961db9825a7 --- /dev/null +++ b/test_programs/execution_success/u128/Prover.toml @@ -0,0 +1,7 @@ +x = "3" +y = "4" +z = "7" +hexa ="0x1f03a" +[big_int] +lo = 1 +hi = 2 \ No newline at end of file diff --git a/test_programs/execution_success/u128/src/main.nr b/test_programs/execution_success/u128/src/main.nr new file mode 100644 index 00000000000..4c734f3a8f9 --- 
/dev/null +++ b/test_programs/execution_success/u128/src/main.nr @@ -0,0 +1,44 @@ +use dep::std; + +fn main(mut x: u32, y: u32, z: u32, big_int: U128, hexa: str<7>) { + let a = U128::from_u64s_le(x as u64, x as u64); + let b = U128::from_u64s_le(y as u64, x as u64); + let c = a + b; + assert(c.lo == z as Field); + assert(c.hi == 2 * x as Field); + assert(U128::from_hex(hexa).lo == 0x1f03a); + let t1 = U128::from_hex("0x9d9c7a87771f03a23783f9d9c7a8777"); + let t2 = U128::from_hex("0x45a26c708BFCF39041"); + let t = t1 + t2; + assert(t.lo == 0xc5e4b029996e17b8); + assert(t.hi == 0x09d9c7a87771f07f); + let t3 = U128::from_le_bytes(t.to_le_bytes()); + assert(t == t3); + + let t4 = t - t2; + assert(t4 == t1); + + let t5 = U128::from_u64s_le(0, 1); + let t6 = U128::from_u64s_le(1, 0); + assert((t5 - t6).hi == 0); + + assert( + (U128::from_hex("0x71f03a23783f9d9c7a8777") * U128::from_hex("0x8BFCF39041")).hi + == U128::from_hex("0x3e4e0471b873470e247c824e61445537").hi + ); + let q = U128::from_hex("0x3e4e0471b873470e247c824e61445537") / U128::from_hex("0x8BFCF39041"); + assert(q == U128::from_hex("0x71f03a23783f9d9c7a8777")); + + assert(big_int.hi == 2); + + let mut small_int = U128::from_integer(x); + assert(small_int.lo == x as Field); + assert(x == small_int.to_integer()); + let shift = small_int << small_int; + assert(shift == U128::from_integer(x << x)); + assert(shift >> small_int == small_int); + assert(shift >> U128::from_integer(127) == U128::from_integer(0)); + assert(shift << U128::from_integer(127) == U128::from_integer(0)); + +} + diff --git a/test_programs/compile_success_empty/unconstrained_empty/Nargo.toml b/test_programs/execution_success/unconstrained_empty/Nargo.toml similarity index 100% rename from test_programs/compile_success_empty/unconstrained_empty/Nargo.toml rename to test_programs/execution_success/unconstrained_empty/Nargo.toml diff --git a/test_programs/compile_success_empty/unconstrained_empty/src/main.nr 
b/test_programs/execution_success/unconstrained_empty/src/main.nr similarity index 100% rename from test_programs/compile_success_empty/unconstrained_empty/src/main.nr rename to test_programs/execution_success/unconstrained_empty/src/main.nr diff --git a/test_programs/execution_success/unsafe_range_constraint/Nargo.toml b/test_programs/execution_success/unsafe_range_constraint/Nargo.toml new file mode 100644 index 00000000000..8714d95ed54 --- /dev/null +++ b/test_programs/execution_success/unsafe_range_constraint/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "unsafe_range_constraint" +version = "0.1.0" +type = "bin" +authors = [""] + +[dependencies] diff --git a/test_programs/execution_success/unsafe_range_constraint/Prover.toml b/test_programs/execution_success/unsafe_range_constraint/Prover.toml new file mode 100644 index 00000000000..07890234a19 --- /dev/null +++ b/test_programs/execution_success/unsafe_range_constraint/Prover.toml @@ -0,0 +1 @@ +x = "3" diff --git a/test_programs/execution_success/unsafe_range_constraint/src/main.nr b/test_programs/execution_success/unsafe_range_constraint/src/main.nr new file mode 100644 index 00000000000..ead5613bcce --- /dev/null +++ b/test_programs/execution_success/unsafe_range_constraint/src/main.nr @@ -0,0 +1,5 @@ +// Test that we can apply a range constraint to a field using +// a builtin. 
+fn main(x: Field) { + x.assert_max_bit_size(48); +} diff --git a/test_programs/gates_report.sh b/test_programs/gates_report.sh index e06e6812e9d..4192c581376 100755 --- a/test_programs/gates_report.sh +++ b/test_programs/gates_report.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e # These tests are incompatible with gas reporting diff --git a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr index a677b10b0cd..253e999ce07 100644 --- a/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr +++ b/test_programs/noir_test_failure/should_fail_mismatch/src/main.nr @@ -10,5 +10,6 @@ fn test_with_extra_space() { // The assert message has a space #[test(should_fail_with = "Not equal")] fn test_runtime_mismatch() { - assert_eq(dep::std::hash::pedersen_commitment([27])[0], 0, "Not equal "); + // We use a pedersen commitment here so that the assertion failure is only known at runtime. + assert_eq(dep::std::hash::pedersen_commitment([27]).x, 0, "Not equal "); } diff --git a/test_programs/noir_test_success/field_comparisons/Nargo.toml b/test_programs/noir_test_success/field_comparisons/Nargo.toml new file mode 100644 index 00000000000..e819464ca68 --- /dev/null +++ b/test_programs/noir_test_success/field_comparisons/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "field_comparisons" +type = "bin" +authors = [""] +[dependencies] diff --git a/test_programs/noir_test_success/field_comparisons/Prover.toml b/test_programs/noir_test_success/field_comparisons/Prover.toml new file mode 100644 index 00000000000..e69de29bb2d diff --git a/test_programs/noir_test_success/field_comparisons/src/main.nr b/test_programs/noir_test_success/field_comparisons/src/main.nr new file mode 100644 index 00000000000..105d82ca755 --- /dev/null +++ b/test_programs/noir_test_success/field_comparisons/src/main.nr @@ -0,0 +1,16 @@ +use dep::std::field::bn254::{TWO_POW_128, assert_gt}; + +#[test(should_fail)] 
+fn test_assert_gt_should_fail_eq() { + assert_gt(0, 0); +} + +#[test(should_fail)] +fn test_assert_gt_should_fail_low_lt() { + assert_gt(0, 0x100); +} + +#[test(should_fail)] +fn test_assert_gt_should_fail_high_lt() { + assert_gt(TWO_POW_128, TWO_POW_128 + 0x100); +} diff --git a/test_programs/rebuild.sh b/test_programs/rebuild.sh index d879ca417ee..a3137920fd5 100755 --- a/test_programs/rebuild.sh +++ b/test_programs/rebuild.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/usr/bin/env bash set -e process_dir() { @@ -14,7 +14,7 @@ process_dir() { if [ -d ./target/ ]; then rm -r ./target/ fi - cargo run compile --only-acir && cargo run execute witness + nargo compile --only-acir && nargo execute witness if [ -d "$current_dir/acir_artifacts/$dir_name/target" ]; then rm -r "$current_dir/acir_artifacts/$dir_name/target" diff --git a/test_programs/test_libraries/exporting_lib/Nargo.toml b/test_programs/test_libraries/exporting_lib/Nargo.toml new file mode 100644 index 00000000000..628418c0608 --- /dev/null +++ b/test_programs/test_libraries/exporting_lib/Nargo.toml @@ -0,0 +1,6 @@ +[package] +name = "exporting_lib" +type = "lib" +authors = [""] + +[dependencies] diff --git a/test_programs/test_libraries/exporting_lib/src/lib.nr b/test_programs/test_libraries/exporting_lib/src/lib.nr new file mode 100644 index 00000000000..af1fd7a32de --- /dev/null +++ b/test_programs/test_libraries/exporting_lib/src/lib.nr @@ -0,0 +1,10 @@ + +struct MyStruct { + inner: Field +} + +type FooStruct = MyStruct; + +fn is_struct_zero(val: MyStruct) -> bool { + val.inner == 0 +} diff --git a/test_programs/test_libraries/reexporting_lib/Nargo.toml b/test_programs/test_libraries/reexporting_lib/Nargo.toml new file mode 100644 index 00000000000..c26ce501e56 --- /dev/null +++ b/test_programs/test_libraries/reexporting_lib/Nargo.toml @@ -0,0 +1,7 @@ +[package] +name = "reexporting_lib" +type = "lib" +authors = [""] + +[dependencies] +exporting_lib = { path = "../exporting_lib" } diff --git 
a/test_programs/test_libraries/reexporting_lib/src/lib.nr b/test_programs/test_libraries/reexporting_lib/src/lib.nr new file mode 100644 index 00000000000..f12dfe01ecd --- /dev/null +++ b/test_programs/test_libraries/reexporting_lib/src/lib.nr @@ -0,0 +1,3 @@ +use dep::exporting_lib::{MyStruct, FooStruct}; + +use dep::exporting_lib as lib; diff --git a/tooling/backend_interface/CHANGELOG.md b/tooling/backend_interface/CHANGELOG.md index 4387d8ccb5f..9ebde989add 100644 --- a/tooling/backend_interface/CHANGELOG.md +++ b/tooling/backend_interface/CHANGELOG.md @@ -214,7 +214,7 @@ * Avoid exposing internals of Assignments type ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) * avoid exposing internals of Assignments type ([#119](https://github.com/noir-lang/acvm-backend-barretenberg/issues/119)) ([614c81b](https://github.com/noir-lang/acvm-backend-barretenberg/commit/614c81b0ea5e110bbf5a61a526bb0173f4fe377a)) -* fix serialisation of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) +* fix serialization of arithmetic expressions ([#145](https://github.com/noir-lang/acvm-backend-barretenberg/issues/145)) ([7f42535](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7f4253570257d9dedcfa8c8fb96b9d097ef06419)) * Implement random_get for wasm backend ([#102](https://github.com/noir-lang/acvm-backend-barretenberg/issues/102)) ([9c0f06e](https://github.com/noir-lang/acvm-backend-barretenberg/commit/9c0f06ef56f23e2b5794e810f433e36ff2c5d6b5)) * rename gates to opcodes ([#59](https://github.com/noir-lang/acvm-backend-barretenberg/issues/59)) ([6e05307](https://github.com/noir-lang/acvm-backend-barretenberg/commit/6e053072d8b9c5d93c296f10782251ccb597f902)) * reorganize and ensure contracts can be compiled in Remix 
([#112](https://github.com/noir-lang/acvm-backend-barretenberg/issues/112)) ([7ec5693](https://github.com/noir-lang/acvm-backend-barretenberg/commit/7ec5693f194a79c379ae2952bc17a31ee63a42b9)) diff --git a/tooling/backend_interface/Cargo.toml b/tooling/backend_interface/Cargo.toml index a9217af65d2..2d991f9ae6c 100644 --- a/tooling/backend_interface/Cargo.toml +++ b/tooling/backend_interface/Cargo.toml @@ -16,9 +16,9 @@ thiserror.workspace = true serde.workspace = true serde_json.workspace = true bb_abstraction_leaks.workspace = true -log.workspace = true +tracing.workspace = true -tempfile = "3.6.0" +tempfile.workspace = true ## bb binary downloading tar = "~0.4.15" diff --git a/tooling/backend_interface/src/cli/info.rs b/tooling/backend_interface/src/cli/info.rs index d3fd89bd2bc..934351dd517 100644 --- a/tooling/backend_interface/src/cli/info.rs +++ b/tooling/backend_interface/src/cli/info.rs @@ -1,9 +1,8 @@ -use acvm::Language; +use acvm::ExpressionWidth; use serde::Deserialize; -use std::collections::HashSet; use std::path::{Path, PathBuf}; -use crate::{BackendError, BackendOpcodeSupport}; +use crate::BackendError; use super::string_from_stderr; @@ -14,8 +13,6 @@ pub(crate) struct InfoCommand { #[derive(Deserialize)] struct InfoResponse { language: LanguageResponse, - opcodes_supported: Vec, - black_box_functions_supported: Vec, } #[derive(Deserialize)] @@ -24,20 +21,8 @@ struct LanguageResponse { width: Option, } -impl BackendOpcodeSupport { - fn new(info: InfoResponse) -> Self { - let opcodes: HashSet = info.opcodes_supported.into_iter().collect(); - let black_box_functions: HashSet = - info.black_box_functions_supported.into_iter().collect(); - Self { opcodes, black_box_functions } - } -} - impl InfoCommand { - pub(crate) fn run( - self, - binary_path: &Path, - ) -> Result<(Language, BackendOpcodeSupport), BackendError> { + pub(crate) fn run(self, binary_path: &Path) -> Result { let mut command = std::process::Command::new(binary_path); 
command.arg("info").arg("-c").arg(self.crs_path).arg("-o").arg("-"); @@ -50,32 +35,27 @@ impl InfoCommand { let backend_info: InfoResponse = serde_json::from_slice(&output.stdout).expect("Backend should return valid json"); - let language: Language = match backend_info.language.name.as_str() { + let expression_width: ExpressionWidth = match backend_info.language.name.as_str() { "PLONK-CSAT" => { let width = backend_info.language.width.unwrap(); - Language::PLONKCSat { width } + ExpressionWidth::Bounded { width } } - "R1CS" => Language::R1CS, - _ => panic!("Unknown langauge"), + "R1CS" => ExpressionWidth::Unbounded, + _ => panic!("Unknown Expression width configuration"), }; - Ok((language, BackendOpcodeSupport::new(backend_info))) + Ok(expression_width) } } #[test] fn info_command() -> Result<(), BackendError> { - use acvm::acir::circuit::opcodes::Opcode; - - use acvm::acir::native_types::Expression; - let backend = crate::get_mock_backend()?; let crs_path = backend.backend_directory(); - let (language, opcode_support) = InfoCommand { crs_path }.run(backend.binary_path())?; + let expression_width = InfoCommand { crs_path }.run(backend.binary_path())?; - assert!(matches!(language, Language::PLONKCSat { width: 3 })); - assert!(opcode_support.is_opcode_supported(&Opcode::Arithmetic(Expression::default()))); + assert!(matches!(expression_width, ExpressionWidth::Bounded { width: 3 })); Ok(()) } diff --git a/tooling/backend_interface/src/cli/write_vk.rs b/tooling/backend_interface/src/cli/write_vk.rs index 8d4aa9cc7e3..da9fc04cbef 100644 --- a/tooling/backend_interface/src/cli/write_vk.rs +++ b/tooling/backend_interface/src/cli/write_vk.rs @@ -12,6 +12,7 @@ pub(crate) struct WriteVkCommand { } impl WriteVkCommand { + #[tracing::instrument(level = "trace", name = "vk_generation", skip_all)] pub(crate) fn run(self, binary_path: &Path) -> Result<(), BackendError> { let mut command = std::process::Command::new(binary_path); diff --git a/tooling/backend_interface/src/lib.rs 
b/tooling/backend_interface/src/lib.rs index d25319e11d5..eab98852555 100644 --- a/tooling/backend_interface/src/lib.rs +++ b/tooling/backend_interface/src/lib.rs @@ -1,18 +1,18 @@ #![warn(unused_crate_dependencies, unused_extern_crates)] #![warn(unreachable_pub)] -use std::{collections::HashSet, path::PathBuf}; +use std::path::PathBuf; mod cli; mod download; mod proof_system; mod smart_contract; -use acvm::acir::circuit::Opcode; -use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; +pub use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; use bb_abstraction_leaks::BB_VERSION; use cli::VersionCommand; pub use download::download_backend; +use tracing::warn; const BACKENDS_DIR: &str = ".nargo/backends"; @@ -116,7 +116,7 @@ impl Backend { // If version doesn't match then download the correct version. Ok(version_string) => { - log::warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version..."); + warn!("`{ACVM_BACKEND_BARRETENBERG}` version `{version_string}` is different from expected `{BB_VERSION}`. Downloading expected version..."); let bb_url = std::env::var("BB_BINARY_URL") .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; @@ -124,7 +124,7 @@ impl Backend { // If `bb` fails to report its version, then attempt to fix it by re-downloading the binary. Err(_) => { - log::warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. Downloading expected version..."); + warn!("Could not determine version of `{ACVM_BACKEND_BARRETENBERG}`. 
Downloading expected version..."); let bb_url = std::env::var("BB_BINARY_URL") .unwrap_or_else(|_| bb_abstraction_leaks::BB_DOWNLOAD_URL.to_owned()); download_backend(&bb_url, binary_path)?; @@ -135,61 +135,13 @@ impl Backend { } } -pub struct BackendOpcodeSupport { - opcodes: HashSet, - black_box_functions: HashSet, -} - -impl BackendOpcodeSupport { - pub fn is_opcode_supported(&self, opcode: &Opcode) -> bool { - match opcode { - Opcode::Arithmetic(_) => self.opcodes.contains("arithmetic"), - Opcode::Directive(_) => self.opcodes.contains("directive"), - Opcode::Brillig(_) => self.opcodes.contains("brillig"), - Opcode::MemoryInit { .. } => self.opcodes.contains("memory_init"), - Opcode::MemoryOp { .. } => self.opcodes.contains("memory_op"), - Opcode::BlackBoxFuncCall(func) => { - self.black_box_functions.contains(func.get_black_box_func().name()) - } - } - } - - pub fn all() -> BackendOpcodeSupport { - BackendOpcodeSupport { - opcodes: HashSet::from([ - "arithmetic".to_string(), - "directive".to_string(), - "brillig".to_string(), - "memory_init".to_string(), - "memory_op".to_string(), - ]), - black_box_functions: HashSet::from([ - "sha256".to_string(), - "schnorr_verify".to_string(), - "blake2s".to_string(), - "pedersen".to_string(), - "pedersen_hash".to_string(), - "hash_to_field_128_security".to_string(), - "ecdsa_secp256k1".to_string(), - "fixed_base_scalar_mul".to_string(), - "and".to_string(), - "xor".to_string(), - "range".to_string(), - "keccak256".to_string(), - "recursive_aggregation".to_string(), - "ecdsa_secp256r1".to_string(), - ]), - } - } -} - #[cfg(test)] mod backend { use crate::{Backend, BackendError}; #[test] fn raises_error_on_missing_binary() { - let bad_backend = Backend::new("i_dont_exist".to_string()); + let bad_backend = Backend::new("i_don't_exist".to_string()); let binary_path = bad_backend.assert_binary_exists(); diff --git a/tooling/backend_interface/src/proof_system.rs b/tooling/backend_interface/src/proof_system.rs index 
bb47603bbf7..595cd7e2020 100644 --- a/tooling/backend_interface/src/proof_system.rs +++ b/tooling/backend_interface/src/proof_system.rs @@ -3,15 +3,16 @@ use std::io::Write; use std::path::Path; use acvm::acir::{circuit::Circuit, native_types::WitnessMap}; +use acvm::ExpressionWidth; use acvm::FieldElement; -use acvm::Language; use tempfile::tempdir; +use tracing::warn; use crate::cli::{ GatesCommand, InfoCommand, ProofAsFieldsCommand, ProveCommand, VerifyCommand, VkAsFieldsCommand, WriteVkCommand, }; -use crate::{Backend, BackendError, BackendOpcodeSupport}; +use crate::{Backend, BackendError}; impl Backend { pub fn get_exact_circuit_size(&self, circuit: &Circuit) -> Result { @@ -30,24 +31,26 @@ impl Backend { .run(binary_path) } - pub fn get_backend_info(&self) -> Result<(Language, BackendOpcodeSupport), BackendError> { + pub fn get_backend_info(&self) -> Result { let binary_path = self.assert_binary_exists()?; self.assert_correct_version()?; InfoCommand { crs_path: self.crs_directory() }.run(binary_path) } - /// If we cannot get a valid backend, returns the default backend which supports all the opcodes - /// and uses Plonk with width 3 + /// If we cannot get a valid backend, returns `ExpressionWidth::Bound { width: 3 }`` /// The function also prints a message saying we could not find a backend - pub fn get_backend_info_or_default(&self) -> (Language, BackendOpcodeSupport) { - if let Ok(backend_info) = self.get_backend_info() { - (backend_info.0, backend_info.1) + pub fn get_backend_info_or_default(&self) -> ExpressionWidth { + if let Ok(expression_width) = self.get_backend_info() { + expression_width } else { - log::warn!("No valid backend found, defaulting to Plonk with width 3 and all opcodes supported"); - (Language::PLONKCSat { width: 3 }, BackendOpcodeSupport::all()) + warn!( + "No valid backend found, ExpressionWidth defaulting to Bounded with a width of 3" + ); + ExpressionWidth::Bounded { width: 3 } } } + #[tracing::instrument(level = "trace", 
skip_all)] pub fn prove( &self, circuit: &Circuit, @@ -88,6 +91,7 @@ impl Backend { Ok(proof) } + #[tracing::instrument(level = "trace", skip_all)] pub fn verify( &self, proof: &[u8], diff --git a/tooling/backend_interface/src/smart_contract.rs b/tooling/backend_interface/src/smart_contract.rs index 5dac57c4072..2548079f8e3 100644 --- a/tooling/backend_interface/src/smart_contract.rs +++ b/tooling/backend_interface/src/smart_contract.rs @@ -47,7 +47,7 @@ mod tests { #[test] fn test_smart_contract() -> Result<(), BackendError> { let expression = &(Witness(1) + Witness(2)) - &Expression::from(Witness(3)); - let constraint = Opcode::Arithmetic(expression); + let constraint = Opcode::AssertZero(expression); let circuit = Circuit { current_witness_index: 4, diff --git a/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock b/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock index c43d1b84915..3c14a936907 100644 --- a/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock +++ b/tooling/backend_interface/test-binaries/mock_backend/Cargo.lock @@ -4,81 +4,67 @@ version = 3 [[package]] name = "anstream" -version = "0.3.2" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "6e2e1ebcb11de5c03c67de28a7df593d32191b44939c482e97702baaaa6ab6a5" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a30da5c5f2d5e72842e00bcb57657162cdabef0931f40e2deb9b4140440cecd" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ "windows-sys", ] [[package]] name = "anstyle-wincon" -version = "1.0.1" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180abfa45703aebe0093f79badacc01b8fd4ea2e35118747e5811127f926e188" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", "windows-sys", ] -[[package]] -name = "bitflags" -version = "2.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630be753d4e58660abd17930c71b647fe46c27ea6b63cc59e1e3851406972e42" - -[[package]] -name = "cc" -version = "1.0.79" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" - [[package]] name = "clap" -version = "4.3.19" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd304a20bff958a57f04c4e96a2e7594cc4490a0e809cbd48bb6437edaa452d" +checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.3.19" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01c6a3f08f1fe5662a35cfe393aec09c4df95f60ee93b7556505260f75eee9e1" +checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" dependencies = [ "anstream", "anstyle", @@ -88,9 +74,9 @@ dependencies = [ [[package]] name = "clap_derive" 
-version = "4.3.12" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", @@ -100,9 +86,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "colorchoice" @@ -110,62 +96,12 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" -[[package]] -name = "errno" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bcfec3a70f97c962c307b2d2c56e358cf1d00b558d74262b5f929ee8cc7e73a" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" -dependencies = [ - "cc", - "libc", -] - [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" -[[package]] -name = "hermit-abi" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" - -[[package]] -name = "is-terminal" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi", - "rustix", - 
"windows-sys", -] - -[[package]] -name = "libc" -version = "0.2.147" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" - -[[package]] -name = "linux-raw-sys" -version = "0.4.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09fc20d2ca12cb9f044c93e3bd6d32d523e6e2ec3db4f7b2939cd99026ecd3f0" - [[package]] name = "mock_backend" version = "0.1.0" @@ -173,43 +109,24 @@ dependencies = [ "clap", ] -[[package]] -name = "once_cell" -version = "1.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" - [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = "1.0.31" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fe8a65d69dd0808184ebb5f836ab526bb259db23c657efa38711b1072ee47f0" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] -[[package]] -name = "rustix" -version = "0.38.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a962918ea88d644592894bc6dc55acc6c0956488adcebbfb6e273506b7fd6e5" -dependencies = [ - "bitflags", - "errno", - "libc", - "linux-raw-sys", - "windows-sys", -] - [[package]] name = "strsim" version = "0.10.0" @@ -218,9 +135,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "syn" -version = "2.0.26" +version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"45c3457aacde3c65315de5031ec191ce46604304d2446e803d71ade03308d970" +checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", @@ -229,9 +146,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "utf8parse" @@ -241,18 +158,18 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "windows-sys" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" -version = "0.48.1" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05d4b17490f70499f20b9e791dcf6a299785ce8af4d709018206dc5b4953e95f" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", @@ -265,42 +182,42 @@ dependencies = [ [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3" +checksum = 
"bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" -version = "0.48.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" diff --git a/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs b/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs index e9a7842ba24..fd8cf602125 100644 --- a/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs +++ b/tooling/backend_interface/test-binaries/mock_backend/src/info_cmd.rs @@ -14,11 +14,11 @@ const INFO_RESPONSE: &str = r#"{ "range", "sha256", 
"blake2s", + "blake3", "keccak256", "schnorr_verify", "pedersen", "pedersen_hash", - "hash_to_field_128_security", "ecdsa_secp256k1", "ecdsa_secp256r1", "fixed_base_scalar_mul", diff --git a/tooling/bb_abstraction_leaks/build.rs b/tooling/bb_abstraction_leaks/build.rs index 166e61a5a97..18413f87793 100644 --- a/tooling/bb_abstraction_leaks/build.rs +++ b/tooling/bb_abstraction_leaks/build.rs @@ -10,7 +10,7 @@ use const_format::formatcp; const USERNAME: &str = "AztecProtocol"; const REPO: &str = "aztec-packages"; -const VERSION: &str = "0.16.0"; +const VERSION: &str = "0.19.0"; const TAG: &str = formatcp!("aztec-packages-v{}", VERSION); const API_URL: &str = diff --git a/tooling/bb_abstraction_leaks/src/contract.sol b/tooling/bb_abstraction_leaks/src/contract.sol deleted file mode 100644 index 814c81d235e..00000000000 --- a/tooling/bb_abstraction_leaks/src/contract.sol +++ /dev/null @@ -1,2575 +0,0 @@ -/** - * @title Ultra Plonk proof verification contract - * @dev Top level Plonk proof verification contract, which allows Plonk proof to be verified - */ -abstract contract BaseUltraVerifier { - // VERIFICATION KEY MEMORY LOCATIONS - uint256 internal constant N_LOC = 0x380; - uint256 internal constant NUM_INPUTS_LOC = 0x3a0; - uint256 internal constant OMEGA_LOC = 0x3c0; - uint256 internal constant DOMAIN_INVERSE_LOC = 0x3e0; - uint256 internal constant Q1_X_LOC = 0x400; - uint256 internal constant Q1_Y_LOC = 0x420; - uint256 internal constant Q2_X_LOC = 0x440; - uint256 internal constant Q2_Y_LOC = 0x460; - uint256 internal constant Q3_X_LOC = 0x480; - uint256 internal constant Q3_Y_LOC = 0x4a0; - uint256 internal constant Q4_X_LOC = 0x4c0; - uint256 internal constant Q4_Y_LOC = 0x4e0; - uint256 internal constant QM_X_LOC = 0x500; - uint256 internal constant QM_Y_LOC = 0x520; - uint256 internal constant QC_X_LOC = 0x540; - uint256 internal constant QC_Y_LOC = 0x560; - uint256 internal constant QARITH_X_LOC = 0x580; - uint256 internal constant QARITH_Y_LOC = 0x5a0; - 
// ### VERIFICATION KEY MEMORY LOCATIONS (continued: selector, permutation, table and id commitments)
// Each G1 commitment occupies two 0x20-byte words (x then y).
uint256 internal constant QSORT_X_LOC = 0x5c0;
uint256 internal constant QSORT_Y_LOC = 0x5e0;
uint256 internal constant QELLIPTIC_X_LOC = 0x600;
uint256 internal constant QELLIPTIC_Y_LOC = 0x620;
uint256 internal constant QAUX_X_LOC = 0x640;
uint256 internal constant QAUX_Y_LOC = 0x660;
uint256 internal constant SIGMA1_X_LOC = 0x680;
uint256 internal constant SIGMA1_Y_LOC = 0x6a0;
uint256 internal constant SIGMA2_X_LOC = 0x6c0;
uint256 internal constant SIGMA2_Y_LOC = 0x6e0;
uint256 internal constant SIGMA3_X_LOC = 0x700;
uint256 internal constant SIGMA3_Y_LOC = 0x720;
uint256 internal constant SIGMA4_X_LOC = 0x740;
uint256 internal constant SIGMA4_Y_LOC = 0x760;
uint256 internal constant TABLE1_X_LOC = 0x780;
uint256 internal constant TABLE1_Y_LOC = 0x7a0;
uint256 internal constant TABLE2_X_LOC = 0x7c0;
uint256 internal constant TABLE2_Y_LOC = 0x7e0;
uint256 internal constant TABLE3_X_LOC = 0x800;
uint256 internal constant TABLE3_Y_LOC = 0x820;
uint256 internal constant TABLE4_X_LOC = 0x840;
uint256 internal constant TABLE4_Y_LOC = 0x860;
uint256 internal constant TABLE_TYPE_X_LOC = 0x880;
uint256 internal constant TABLE_TYPE_Y_LOC = 0x8a0;
uint256 internal constant ID1_X_LOC = 0x8c0;
uint256 internal constant ID1_Y_LOC = 0x8e0;
uint256 internal constant ID2_X_LOC = 0x900;
uint256 internal constant ID2_Y_LOC = 0x920;
uint256 internal constant ID3_X_LOC = 0x940;
uint256 internal constant ID3_Y_LOC = 0x960;
uint256 internal constant ID4_X_LOC = 0x980;
uint256 internal constant ID4_Y_LOC = 0x9a0;
uint256 internal constant CONTAINS_RECURSIVE_PROOF_LOC = 0x9c0;
uint256 internal constant RECURSIVE_PROOF_PUBLIC_INPUT_INDICES_LOC = 0x9e0;
uint256 internal constant G2X_X0_LOC = 0xa00;
uint256 internal constant G2X_X1_LOC = 0xa20;
uint256 internal constant G2X_Y0_LOC = 0xa40;
uint256 internal constant G2X_Y1_LOC = 0xa60;

// ### PROOF DATA MEMORY LOCATIONS
// Wire, sorted-list, grand-product and quotient commitments copied in from calldata.
uint256 internal constant W1_X_LOC = 0x1200;
uint256 internal constant W1_Y_LOC = 0x1220;
uint256 internal constant W2_X_LOC = 0x1240;
uint256 internal constant W2_Y_LOC = 0x1260;
uint256 internal constant W3_X_LOC = 0x1280;
uint256 internal constant W3_Y_LOC = 0x12a0;
uint256 internal constant W4_X_LOC = 0x12c0;
uint256 internal constant W4_Y_LOC = 0x12e0;
uint256 internal constant S_X_LOC = 0x1300;
uint256 internal constant S_Y_LOC = 0x1320;
uint256 internal constant Z_X_LOC = 0x1340;
uint256 internal constant Z_Y_LOC = 0x1360;
uint256 internal constant Z_LOOKUP_X_LOC = 0x1380;
uint256 internal constant Z_LOOKUP_Y_LOC = 0x13a0;
uint256 internal constant T1_X_LOC = 0x13c0;
uint256 internal constant T1_Y_LOC = 0x13e0;
uint256 internal constant T2_X_LOC = 0x1400;
uint256 internal constant T2_Y_LOC = 0x1420;
uint256 internal constant T3_X_LOC = 0x1440;
uint256 internal constant T3_Y_LOC = 0x1460;
uint256 internal constant T4_X_LOC = 0x1480;
uint256 internal constant T4_Y_LOC = 0x14a0;

// Polynomial evaluations at the challenge point zeta (and zeta * omega for the *_OMEGA_* entries).
uint256 internal constant W1_EVAL_LOC = 0x1600;
uint256 internal constant W2_EVAL_LOC = 0x1620;
uint256 internal constant W3_EVAL_LOC = 0x1640;
uint256 internal constant W4_EVAL_LOC = 0x1660;
uint256 internal constant S_EVAL_LOC = 0x1680;
uint256 internal constant Z_EVAL_LOC = 0x16a0;
uint256 internal constant Z_LOOKUP_EVAL_LOC = 0x16c0;
uint256 internal constant Q1_EVAL_LOC = 0x16e0;
uint256 internal constant Q2_EVAL_LOC = 0x1700;
uint256 internal constant Q3_EVAL_LOC = 0x1720;
uint256 internal constant Q4_EVAL_LOC = 0x1740;
uint256 internal constant QM_EVAL_LOC = 0x1760;
uint256 internal constant QC_EVAL_LOC = 0x1780;
uint256 internal constant QARITH_EVAL_LOC = 0x17a0;
uint256 internal constant QSORT_EVAL_LOC = 0x17c0;
uint256 internal constant QELLIPTIC_EVAL_LOC = 0x17e0;
uint256 internal constant QAUX_EVAL_LOC = 0x1800;
uint256 internal constant TABLE1_EVAL_LOC = 0x1840;
uint256 internal constant TABLE2_EVAL_LOC = 0x1860;
uint256 internal constant TABLE3_EVAL_LOC = 0x1880;
uint256 internal constant TABLE4_EVAL_LOC = 0x18a0;
uint256 internal constant TABLE_TYPE_EVAL_LOC = 0x18c0;
uint256 internal constant ID1_EVAL_LOC = 0x18e0;
uint256 internal constant ID2_EVAL_LOC = 0x1900;
uint256 internal constant ID3_EVAL_LOC = 0x1920;
uint256 internal constant ID4_EVAL_LOC = 0x1940;
uint256 internal constant SIGMA1_EVAL_LOC = 0x1960;
uint256 internal constant SIGMA2_EVAL_LOC = 0x1980;
uint256 internal constant SIGMA3_EVAL_LOC = 0x19a0;
uint256 internal constant SIGMA4_EVAL_LOC = 0x19c0;
uint256 internal constant W1_OMEGA_EVAL_LOC = 0x19e0;
uint256 internal constant W2_OMEGA_EVAL_LOC = 0x2000;
uint256 internal constant W3_OMEGA_EVAL_LOC = 0x2020;
uint256 internal constant W4_OMEGA_EVAL_LOC = 0x2040;
uint256 internal constant S_OMEGA_EVAL_LOC = 0x2060;
uint256 internal constant Z_OMEGA_EVAL_LOC = 0x2080;
uint256 internal constant Z_LOOKUP_OMEGA_EVAL_LOC = 0x20a0;
uint256 internal constant TABLE1_OMEGA_EVAL_LOC = 0x20c0;
uint256 internal constant TABLE2_OMEGA_EVAL_LOC = 0x20e0;
uint256 internal constant TABLE3_OMEGA_EVAL_LOC = 0x2100;
uint256 internal constant TABLE4_OMEGA_EVAL_LOC = 0x2120;

// KZG opening proof commitments.
uint256 internal constant PI_Z_X_LOC = 0x2300;
uint256 internal constant PI_Z_Y_LOC = 0x2320;
uint256 internal constant PI_Z_OMEGA_X_LOC = 0x2340;
uint256 internal constant PI_Z_OMEGA_Y_LOC = 0x2360;

// Used for elliptic widget. These are alias names for wire + shifted wire evaluations.
uint256 internal constant X1_EVAL_LOC = W2_EVAL_LOC;
uint256 internal constant X2_EVAL_LOC = W1_OMEGA_EVAL_LOC;
uint256 internal constant X3_EVAL_LOC = W2_OMEGA_EVAL_LOC;
uint256 internal constant Y1_EVAL_LOC = W3_EVAL_LOC;
uint256 internal constant Y2_EVAL_LOC = W4_OMEGA_EVAL_LOC;
uint256 internal constant Y3_EVAL_LOC = W3_OMEGA_EVAL_LOC;
uint256 internal constant QBETA_LOC = Q3_EVAL_LOC;
uint256 internal constant QBETA_SQR_LOC = Q4_EVAL_LOC;
uint256 internal constant QSIGN_LOC = Q1_EVAL_LOC;

// ### CHALLENGES MEMORY OFFSETS
uint256 internal constant C_BETA_LOC = 0x2600;
uint256 internal constant C_GAMMA_LOC = 0x2620;
uint256 internal constant C_ALPHA_LOC = 0x2640;
uint256 internal constant C_ETA_LOC = 0x2660;
uint256 internal constant C_ETA_SQR_LOC = 0x2680;
uint256 internal constant C_ETA_CUBE_LOC = 0x26a0;

uint256 internal constant C_ZETA_LOC = 0x26c0;
uint256 internal constant C_CURRENT_LOC = 0x26e0;
// Batch-opening scalars v0..v30, consecutive 0x20-byte slots from 0x2700.
uint256 internal constant C_V0_LOC = 0x2700;
uint256 internal constant C_V1_LOC = 0x2720;
uint256 internal constant C_V2_LOC = 0x2740;
uint256 internal constant C_V3_LOC = 0x2760;
uint256 internal constant C_V4_LOC = 0x2780;
uint256 internal constant C_V5_LOC = 0x27a0;
uint256 internal constant C_V6_LOC = 0x27c0;
uint256 internal constant C_V7_LOC = 0x27e0;
uint256 internal constant C_V8_LOC = 0x2800;
uint256 internal constant C_V9_LOC = 0x2820;
uint256 internal constant C_V10_LOC = 0x2840;
uint256 internal constant C_V11_LOC = 0x2860;
uint256 internal constant C_V12_LOC = 0x2880;
uint256 internal constant C_V13_LOC = 0x28a0;
uint256 internal constant C_V14_LOC = 0x28c0;
uint256 internal constant C_V15_LOC = 0x28e0;
uint256 internal constant C_V16_LOC = 0x2900;
uint256 internal constant C_V17_LOC = 0x2920;
uint256 internal constant C_V18_LOC = 0x2940;
uint256 internal constant C_V19_LOC = 0x2960;
uint256 internal constant C_V20_LOC = 0x2980;
uint256 internal constant C_V21_LOC = 0x29a0;
uint256 internal constant C_V22_LOC = 0x29c0;
uint256 internal constant C_V23_LOC = 0x29e0;
uint256 internal constant C_V24_LOC = 0x2a00;
uint256 internal constant C_V25_LOC = 0x2a20;
uint256 internal constant C_V26_LOC = 0x2a40;
uint256 internal constant C_V27_LOC = 0x2a60;
uint256 internal constant C_V28_LOC = 0x2a80;
uint256 internal constant C_V29_LOC = 0x2aa0;
uint256 internal constant C_V30_LOC = 0x2ac0;

uint256 internal constant C_U_LOC = 0x2b00;

// ### LOCAL VARIABLES MEMORY OFFSETS
uint256 internal constant DELTA_NUMERATOR_LOC = 0x3000;
uint256 internal constant DELTA_DENOMINATOR_LOC = 0x3020;
uint256 internal constant ZETA_POW_N_LOC = 0x3040;
uint256 internal constant PUBLIC_INPUT_DELTA_LOC = 0x3060;
uint256 internal constant ZERO_POLY_LOC = 0x3080;
uint256 internal constant L_START_LOC = 0x30a0;
uint256 internal constant L_END_LOC = 0x30c0;
uint256 internal constant R_ZERO_EVAL_LOC = 0x30e0;

uint256 internal constant PLOOKUP_DELTA_NUMERATOR_LOC = 0x3100;
uint256 internal constant PLOOKUP_DELTA_DENOMINATOR_LOC = 0x3120;
uint256 internal constant PLOOKUP_DELTA_LOC = 0x3140;

uint256 internal constant ACCUMULATOR_X_LOC = 0x3160;
uint256 internal constant ACCUMULATOR_Y_LOC = 0x3180;
uint256 internal constant ACCUMULATOR2_X_LOC = 0x31a0;
uint256 internal constant ACCUMULATOR2_Y_LOC = 0x31c0;
uint256 internal constant PAIRING_LHS_X_LOC = 0x31e0;
uint256 internal constant PAIRING_LHS_Y_LOC = 0x3200;
uint256 internal constant PAIRING_RHS_X_LOC = 0x3220;
uint256 internal constant PAIRING_RHS_Y_LOC = 0x3240;

// ### SUCCESS FLAG MEMORY LOCATIONS
uint256 internal constant GRAND_PRODUCT_SUCCESS_FLAG = 0x3300;
// NOTE(review): 0x3020 aliases DELTA_DENOMINATOR_LOC and breaks the 0x33xx flag sequence
// (0x3320 would be the regular slot). Value deliberately left unchanged — changing it alters
// the memory map of deployed verifiers — but confirm the delta denominator is fully consumed
// before this flag is written.
uint256 internal constant ARITHMETIC_TERM_SUCCESS_FLAG = 0x3020;
uint256 internal constant BATCH_OPENING_SUCCESS_FLAG = 0x3340;
uint256 internal constant OPENING_COMMITMENT_SUCCESS_FLAG = 0x3360;
uint256 internal constant PAIRING_PREAMBLE_SUCCESS_FLAG = 0x3380;
uint256 internal constant PAIRING_SUCCESS_FLAG = 0x33a0;
uint256 internal constant RESULT_FLAG = 0x33c0;

// misc stuff
uint256 internal constant OMEGA_INVERSE_LOC = 0x3400;
uint256 internal constant C_ALPHA_SQR_LOC = 0x3420;
uint256 internal constant C_ALPHA_CUBE_LOC = 0x3440;
uint256 internal constant C_ALPHA_QUAD_LOC = 0x3460;
uint256 internal constant C_ALPHA_BASE_LOC = 0x3480;

// ### RECURSION VARIABLE MEMORY LOCATIONS
uint256 internal constant RECURSIVE_P1_X_LOC = 0x3500;
uint256 internal constant RECURSIVE_P1_Y_LOC = 0x3520;
uint256 internal constant RECURSIVE_P2_X_LOC = 0x3540;
uint256 internal constant RECURSIVE_P2_Y_LOC = 0x3560;

uint256 internal constant PUBLIC_INPUTS_HASH_LOCATION = 0x3580;

// sub-identity storage
uint256 internal constant PERMUTATION_IDENTITY = 0x3600;
uint256 internal constant PLOOKUP_IDENTITY = 0x3620;
uint256 internal constant ARITHMETIC_IDENTITY = 0x3640;
uint256 internal constant SORT_IDENTITY = 0x3660;
uint256 internal constant ELLIPTIC_IDENTITY = 0x3680;
uint256 internal constant AUX_IDENTITY = 0x36a0;
uint256 internal constant AUX_NON_NATIVE_FIELD_EVALUATION = 0x36c0;
uint256 internal constant AUX_LIMB_ACCUMULATOR_EVALUATION = 0x36e0;
uint256 internal constant AUX_RAM_CONSISTENCY_EVALUATION = 0x3700;
uint256 internal constant AUX_ROM_CONSISTENCY_EVALUATION = 0x3720;
uint256 internal constant AUX_MEMORY_EVALUATION = 0x3740;

uint256 internal constant QUOTIENT_EVAL_LOC = 0x3760;
uint256 internal constant ZERO_POLY_INVERSE_LOC = 0x3780;

// when hashing public inputs we use memory at NU_CHALLENGE_INPUT_LOC_A, as the hash input size is unknown at compile time
uint256 internal constant NU_CHALLENGE_INPUT_LOC_A = 0x37a0;
uint256 internal constant NU_CHALLENGE_INPUT_LOC_B = 0x37c0;
uint256 internal constant NU_CHALLENGE_INPUT_LOC_C = 0x37e0;

// 4-byte custom-error selectors used for manual reverts inside assembly blocks.
bytes4 internal constant PUBLIC_INPUT_INVALID_BN128_G1_POINT_SELECTOR = 0xeba9f4a6;
bytes4 internal constant PUBLIC_INPUT_GE_P_SELECTOR = 0x374a972f;
bytes4 internal constant MOD_EXP_FAILURE_SELECTOR = 0xf894a7bc;
bytes4 internal constant EC_SCALAR_MUL_FAILURE_SELECTOR = 0xf755f369;
bytes4 internal constant PROOF_FAILURE_SELECTOR = 0x0711fcec;

uint256 internal constant ETA_INPUT_LENGTH = 0xc0; // W1, W2, W3 = 6 * 0x20 bytes

// We need to hash 41 field elements when generating the NU challenge
// w1, w2, w3, w4, s, z, z_lookup, q1, q2, q3, q4, qm, qc, qarith (14)
// qsort, qelliptic, qaux, sigma1, sigma2, sigma3, sigma4, (7)
// table1, table2, table3, table4, tabletype, id1, id2, id3, id4, (9)
// w1_omega, w2_omega, w3_omega, w4_omega, s_omega, z_omega, z_lookup_omega, (7)
// table1_omega, table2_omega, table3_omega, table4_omega (4)
uint256 internal constant NU_INPUT_LENGTH = 0x520; // 0x520 = 41 * 0x20

// There are ELEVEN G1 group elements added into the transcript in the `beta` round, that we need to skip over
// W1, W2, W3, W4, S, Z, Z_LOOKUP, T1, T2, T3, T4
uint256 internal constant NU_CALLDATA_SKIP_LENGTH = 0x2c0; // 11 * 0x40 = 0x2c0

uint256 internal constant NEGATIVE_INVERSE_OF_2_MODULO_P =
    0x183227397098d014dc2822db40c0ac2e9419f4243cdcb848a1f0fac9f8000000;
uint256 internal constant LIMB_SIZE = 0x100000000000000000; // 1 << 68 (2^68; previous comment "2<<68" = 2^69 was wrong)
uint256 internal constant SUBLIMB_SHIFT = 0x4000; // 1 << 14 (2^14; previous comment "2<<14" = 2^15 was wrong)

// y^2 = x^3 + ax + b
// for Grumpkin, a = 0 and b = -17.
We use b in a custom gate relation that evaluates elliptic curve arithmetic - uint256 internal constant GRUMPKIN_CURVE_B_PARAMETER_NEGATED = 17; - error PUBLIC_INPUT_COUNT_INVALID(uint256 expected, uint256 actual); - error PUBLIC_INPUT_INVALID_BN128_G1_POINT(); - error PUBLIC_INPUT_GE_P(); - error MOD_EXP_FAILURE(); - error EC_SCALAR_MUL_FAILURE(); - error PROOF_FAILURE(); - - function getVerificationKeyHash() public pure virtual returns (bytes32); - - function loadVerificationKey(uint256 _vk, uint256 _omegaInverseLoc) internal pure virtual; - - /** - * @notice Verify a Ultra Plonk proof - * @param _proof - The serialized proof - * @param _publicInputs - An array of the public inputs - * @return True if proof is valid, reverts otherwise - */ - function verify(bytes calldata _proof, bytes32[] calldata _publicInputs) external view returns (bool) { - loadVerificationKey(N_LOC, OMEGA_INVERSE_LOC); - - uint256 requiredPublicInputCount; - assembly { - requiredPublicInputCount := mload(NUM_INPUTS_LOC) - } - if (requiredPublicInputCount != _publicInputs.length) { - revert PUBLIC_INPUT_COUNT_INVALID(requiredPublicInputCount, _publicInputs.length); - } - - assembly { - let q := 21888242871839275222246405745257275088696311157297823662689037894645226208583 // EC group order - let p := 21888242871839275222246405745257275088548364400416034343698204186575808495617 // Prime field order - - /** - * LOAD PROOF FROM CALLDATA - */ - { - let data_ptr := add(calldataload(0x04), 0x24) - - mstore(W1_Y_LOC, mod(calldataload(data_ptr), q)) - mstore(W1_X_LOC, mod(calldataload(add(data_ptr, 0x20)), q)) - - mstore(W2_Y_LOC, mod(calldataload(add(data_ptr, 0x40)), q)) - mstore(W2_X_LOC, mod(calldataload(add(data_ptr, 0x60)), q)) - - mstore(W3_Y_LOC, mod(calldataload(add(data_ptr, 0x80)), q)) - mstore(W3_X_LOC, mod(calldataload(add(data_ptr, 0xa0)), q)) - - mstore(W4_Y_LOC, mod(calldataload(add(data_ptr, 0xc0)), q)) - mstore(W4_X_LOC, mod(calldataload(add(data_ptr, 0xe0)), q)) - - mstore(S_Y_LOC, 
mod(calldataload(add(data_ptr, 0x100)), q)) - mstore(S_X_LOC, mod(calldataload(add(data_ptr, 0x120)), q)) - mstore(Z_Y_LOC, mod(calldataload(add(data_ptr, 0x140)), q)) - mstore(Z_X_LOC, mod(calldataload(add(data_ptr, 0x160)), q)) - mstore(Z_LOOKUP_Y_LOC, mod(calldataload(add(data_ptr, 0x180)), q)) - mstore(Z_LOOKUP_X_LOC, mod(calldataload(add(data_ptr, 0x1a0)), q)) - mstore(T1_Y_LOC, mod(calldataload(add(data_ptr, 0x1c0)), q)) - mstore(T1_X_LOC, mod(calldataload(add(data_ptr, 0x1e0)), q)) - - mstore(T2_Y_LOC, mod(calldataload(add(data_ptr, 0x200)), q)) - mstore(T2_X_LOC, mod(calldataload(add(data_ptr, 0x220)), q)) - - mstore(T3_Y_LOC, mod(calldataload(add(data_ptr, 0x240)), q)) - mstore(T3_X_LOC, mod(calldataload(add(data_ptr, 0x260)), q)) - - mstore(T4_Y_LOC, mod(calldataload(add(data_ptr, 0x280)), q)) - mstore(T4_X_LOC, mod(calldataload(add(data_ptr, 0x2a0)), q)) - - mstore(W1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x2c0)), p)) - mstore(W2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x2e0)), p)) - mstore(W3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x300)), p)) - mstore(W4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x320)), p)) - mstore(S_EVAL_LOC, mod(calldataload(add(data_ptr, 0x340)), p)) - mstore(Z_EVAL_LOC, mod(calldataload(add(data_ptr, 0x360)), p)) - mstore(Z_LOOKUP_EVAL_LOC, mod(calldataload(add(data_ptr, 0x380)), p)) - mstore(Q1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x3a0)), p)) - mstore(Q2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x3c0)), p)) - mstore(Q3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x3e0)), p)) - mstore(Q4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x400)), p)) - mstore(QM_EVAL_LOC, mod(calldataload(add(data_ptr, 0x420)), p)) - mstore(QC_EVAL_LOC, mod(calldataload(add(data_ptr, 0x440)), p)) - mstore(QARITH_EVAL_LOC, mod(calldataload(add(data_ptr, 0x460)), p)) - mstore(QSORT_EVAL_LOC, mod(calldataload(add(data_ptr, 0x480)), p)) - mstore(QELLIPTIC_EVAL_LOC, mod(calldataload(add(data_ptr, 0x4a0)), p)) - mstore(QAUX_EVAL_LOC, 
mod(calldataload(add(data_ptr, 0x4c0)), p)) - - mstore(SIGMA1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x4e0)), p)) - mstore(SIGMA2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x500)), p)) - - mstore(SIGMA3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x520)), p)) - mstore(SIGMA4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x540)), p)) - - mstore(TABLE1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x560)), p)) - mstore(TABLE2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x580)), p)) - mstore(TABLE3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x5a0)), p)) - mstore(TABLE4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x5c0)), p)) - mstore(TABLE_TYPE_EVAL_LOC, mod(calldataload(add(data_ptr, 0x5e0)), p)) - - mstore(ID1_EVAL_LOC, mod(calldataload(add(data_ptr, 0x600)), p)) - mstore(ID2_EVAL_LOC, mod(calldataload(add(data_ptr, 0x620)), p)) - mstore(ID3_EVAL_LOC, mod(calldataload(add(data_ptr, 0x640)), p)) - mstore(ID4_EVAL_LOC, mod(calldataload(add(data_ptr, 0x660)), p)) - - mstore(W1_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x680)), p)) - mstore(W2_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x6a0)), p)) - mstore(W3_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x6c0)), p)) - mstore(W4_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x6e0)), p)) - mstore(S_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x700)), p)) - - mstore(Z_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x720)), p)) - - mstore(Z_LOOKUP_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x740)), p)) - mstore(TABLE1_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x760)), p)) - mstore(TABLE2_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x780)), p)) - mstore(TABLE3_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x7a0)), p)) - mstore(TABLE4_OMEGA_EVAL_LOC, mod(calldataload(add(data_ptr, 0x7c0)), p)) - - mstore(PI_Z_Y_LOC, mod(calldataload(add(data_ptr, 0x7e0)), q)) - mstore(PI_Z_X_LOC, mod(calldataload(add(data_ptr, 0x800)), q)) - - mstore(PI_Z_OMEGA_Y_LOC, mod(calldataload(add(data_ptr, 0x820)), q)) - 
mstore(PI_Z_OMEGA_X_LOC, mod(calldataload(add(data_ptr, 0x840)), q)) - } - - /** - * LOAD RECURSIVE PROOF INTO MEMORY - */ - { - if mload(CONTAINS_RECURSIVE_PROOF_LOC) { - let public_inputs_ptr := add(calldataload(0x24), 0x24) - let index_counter := add(shl(5, mload(RECURSIVE_PROOF_PUBLIC_INPUT_INDICES_LOC)), public_inputs_ptr) - - let x0 := calldataload(index_counter) - x0 := add(x0, shl(68, calldataload(add(index_counter, 0x20)))) - x0 := add(x0, shl(136, calldataload(add(index_counter, 0x40)))) - x0 := add(x0, shl(204, calldataload(add(index_counter, 0x60)))) - let y0 := calldataload(add(index_counter, 0x80)) - y0 := add(y0, shl(68, calldataload(add(index_counter, 0xa0)))) - y0 := add(y0, shl(136, calldataload(add(index_counter, 0xc0)))) - y0 := add(y0, shl(204, calldataload(add(index_counter, 0xe0)))) - let x1 := calldataload(add(index_counter, 0x100)) - x1 := add(x1, shl(68, calldataload(add(index_counter, 0x120)))) - x1 := add(x1, shl(136, calldataload(add(index_counter, 0x140)))) - x1 := add(x1, shl(204, calldataload(add(index_counter, 0x160)))) - let y1 := calldataload(add(index_counter, 0x180)) - y1 := add(y1, shl(68, calldataload(add(index_counter, 0x1a0)))) - y1 := add(y1, shl(136, calldataload(add(index_counter, 0x1c0)))) - y1 := add(y1, shl(204, calldataload(add(index_counter, 0x1e0)))) - mstore(RECURSIVE_P1_X_LOC, x0) - mstore(RECURSIVE_P1_Y_LOC, y0) - mstore(RECURSIVE_P2_X_LOC, x1) - mstore(RECURSIVE_P2_Y_LOC, y1) - - // validate these are valid bn128 G1 points - if iszero(and(and(lt(x0, q), lt(x1, q)), and(lt(y0, q), lt(y1, q)))) { - mstore(0x00, PUBLIC_INPUT_INVALID_BN128_G1_POINT_SELECTOR) - revert(0x00, 0x04) - } - } - } - - { - /** - * Generate initial challenge - */ - mstore(0x00, shl(224, mload(N_LOC))) - mstore(0x04, shl(224, mload(NUM_INPUTS_LOC))) - let challenge := keccak256(0x00, 0x08) - - /** - * Generate eta challenge - */ - mstore(PUBLIC_INPUTS_HASH_LOCATION, challenge) - // The public input location is stored at 0x24, we then add 0x24 
to skip selector and the length of public inputs - let public_inputs_start := add(calldataload(0x24), 0x24) - // copy the public inputs over - let public_input_size := mul(mload(NUM_INPUTS_LOC), 0x20) - calldatacopy(add(PUBLIC_INPUTS_HASH_LOCATION, 0x20), public_inputs_start, public_input_size) - - // copy W1, W2, W3 into challenge. Each point is 0x40 bytes, so load 0xc0 = 3 * 0x40 bytes (ETA input length) - let w_start := add(calldataload(0x04), 0x24) - calldatacopy(add(add(PUBLIC_INPUTS_HASH_LOCATION, 0x20), public_input_size), w_start, ETA_INPUT_LENGTH) - - // Challenge is the old challenge + public inputs + W1, W2, W3 (0x20 + public_input_size + 0xc0) - let challenge_bytes_size := add(0x20, add(public_input_size, ETA_INPUT_LENGTH)) - - challenge := keccak256(PUBLIC_INPUTS_HASH_LOCATION, challenge_bytes_size) - { - let eta := mod(challenge, p) - mstore(C_ETA_LOC, eta) - mstore(C_ETA_SQR_LOC, mulmod(eta, eta, p)) - mstore(C_ETA_CUBE_LOC, mulmod(mload(C_ETA_SQR_LOC), eta, p)) - } - - /** - * Generate beta challenge - */ - mstore(0x00, challenge) - mstore(0x20, mload(W4_Y_LOC)) - mstore(0x40, mload(W4_X_LOC)) - mstore(0x60, mload(S_Y_LOC)) - mstore(0x80, mload(S_X_LOC)) - challenge := keccak256(0x00, 0xa0) - mstore(C_BETA_LOC, mod(challenge, p)) - - /** - * Generate gamma challenge - */ - mstore(0x00, challenge) - mstore8(0x20, 0x01) - challenge := keccak256(0x00, 0x21) - mstore(C_GAMMA_LOC, mod(challenge, p)) - - /** - * Generate alpha challenge - */ - mstore(0x00, challenge) - mstore(0x20, mload(Z_Y_LOC)) - mstore(0x40, mload(Z_X_LOC)) - mstore(0x60, mload(Z_LOOKUP_Y_LOC)) - mstore(0x80, mload(Z_LOOKUP_X_LOC)) - challenge := keccak256(0x00, 0xa0) - mstore(C_ALPHA_LOC, mod(challenge, p)) - - /** - * Compute and store some powers of alpha for future computations - */ - let alpha := mload(C_ALPHA_LOC) - mstore(C_ALPHA_SQR_LOC, mulmod(alpha, alpha, p)) - mstore(C_ALPHA_CUBE_LOC, mulmod(mload(C_ALPHA_SQR_LOC), alpha, p)) - mstore(C_ALPHA_QUAD_LOC, 
mulmod(mload(C_ALPHA_CUBE_LOC), alpha, p)) - mstore(C_ALPHA_BASE_LOC, alpha) - - /** - * Generate zeta challenge - */ - mstore(0x00, challenge) - mstore(0x20, mload(T1_Y_LOC)) - mstore(0x40, mload(T1_X_LOC)) - mstore(0x60, mload(T2_Y_LOC)) - mstore(0x80, mload(T2_X_LOC)) - mstore(0xa0, mload(T3_Y_LOC)) - mstore(0xc0, mload(T3_X_LOC)) - mstore(0xe0, mload(T4_Y_LOC)) - mstore(0x100, mload(T4_X_LOC)) - - challenge := keccak256(0x00, 0x120) - - mstore(C_ZETA_LOC, mod(challenge, p)) - mstore(C_CURRENT_LOC, challenge) - } - - /** - * EVALUATE FIELD OPERATIONS - */ - - /** - * COMPUTE PUBLIC INPUT DELTA - * ΔPI = ∏ᵢ∈ℓ(wᵢ + β σ(i) + γ) / ∏ᵢ∈ℓ(wᵢ + β σ'(i) + γ) - */ - { - let beta := mload(C_BETA_LOC) // β - let gamma := mload(C_GAMMA_LOC) // γ - let work_root := mload(OMEGA_LOC) // ω - let numerator_value := 1 - let denominator_value := 1 - - let p_clone := p // move p to the front of the stack - let valid_inputs := true - - // Load the starting point of the public inputs (jump over the selector and the length of public inputs [0x24]) - let public_inputs_ptr := add(calldataload(0x24), 0x24) - - // endpoint_ptr = public_inputs_ptr + num_inputs * 0x20. 
// every public input is 0x20 bytes - let endpoint_ptr := add(public_inputs_ptr, mul(mload(NUM_INPUTS_LOC), 0x20)) - - // root_1 = β * 0x05 - let root_1 := mulmod(beta, 0x05, p_clone) // k1.β - // root_2 = β * 0x0c - let root_2 := mulmod(beta, 0x0c, p_clone) - // @note 0x05 + 0x07 == 0x0c == external coset generator - - for {} lt(public_inputs_ptr, endpoint_ptr) { public_inputs_ptr := add(public_inputs_ptr, 0x20) } { - /** - * input = public_input[i] - * valid_inputs &= input < p - * temp = input + gamma - * numerator_value *= (β.σ(i) + wᵢ + γ) // σ(i) = 0x05.ωⁱ - * denominator_value *= (β.σ'(i) + wᵢ + γ) // σ'(i) = 0x0c.ωⁱ - * root_1 *= ω - * root_2 *= ω - */ - - let input := calldataload(public_inputs_ptr) - valid_inputs := and(valid_inputs, lt(input, p_clone)) - let temp := addmod(input, gamma, p_clone) - - numerator_value := mulmod(numerator_value, add(root_1, temp), p_clone) - denominator_value := mulmod(denominator_value, add(root_2, temp), p_clone) - - root_1 := mulmod(root_1, work_root, p_clone) - root_2 := mulmod(root_2, work_root, p_clone) - } - - // Revert if not all public inputs are field elements (i.e. 
< p) - if iszero(valid_inputs) { - mstore(0x00, PUBLIC_INPUT_GE_P_SELECTOR) - revert(0x00, 0x04) - } - - mstore(DELTA_NUMERATOR_LOC, numerator_value) - mstore(DELTA_DENOMINATOR_LOC, denominator_value) - } - - /** - * Compute Plookup delta factor [γ(1 + β)]^{n-k} - * k = num roots cut out of Z_H = 4 - */ - { - let delta_base := mulmod(mload(C_GAMMA_LOC), addmod(mload(C_BETA_LOC), 1, p), p) - let delta_numerator := delta_base - { - let exponent := mload(N_LOC) - let count := 1 - for {} lt(count, exponent) { count := add(count, count) } { - delta_numerator := mulmod(delta_numerator, delta_numerator, p) - } - } - mstore(PLOOKUP_DELTA_NUMERATOR_LOC, delta_numerator) - - let delta_denominator := mulmod(delta_base, delta_base, p) - delta_denominator := mulmod(delta_denominator, delta_denominator, p) - mstore(PLOOKUP_DELTA_DENOMINATOR_LOC, delta_denominator) - } - /** - * Compute lagrange poly and vanishing poly fractions - */ - { - /** - * vanishing_numerator = zeta - * ZETA_POW_N = zeta^n - * vanishing_numerator -= 1 - * accumulating_root = omega_inverse - * work_root = p - accumulating_root - * domain_inverse = domain_inverse - * vanishing_denominator = zeta + work_root - * work_root *= accumulating_root - * vanishing_denominator *= (zeta + work_root) - * work_root *= accumulating_root - * vanishing_denominator *= (zeta + work_root) - * vanishing_denominator *= (zeta + (zeta + accumulating_root)) - * work_root = omega - * lagrange_numerator = vanishing_numerator * domain_inverse - * l_start_denominator = zeta - 1 - * accumulating_root = work_root^2 - * l_end_denominator = accumulating_root^2 * work_root * zeta - 1 - * Note: l_end_denominator term contains a term \omega^5 to cut out 5 roots of unity from vanishing poly - */ - - let zeta := mload(C_ZETA_LOC) - - // compute zeta^n, where n is a power of 2 - let vanishing_numerator := zeta - { - // pow_small - let exponent := mload(N_LOC) - let count := 1 - for {} lt(count, exponent) { count := add(count, count) } { - 
vanishing_numerator := mulmod(vanishing_numerator, vanishing_numerator, p) - } - } - mstore(ZETA_POW_N_LOC, vanishing_numerator) - vanishing_numerator := addmod(vanishing_numerator, sub(p, 1), p) - - let accumulating_root := mload(OMEGA_INVERSE_LOC) - let work_root := sub(p, accumulating_root) - let domain_inverse := mload(DOMAIN_INVERSE_LOC) - - let vanishing_denominator := addmod(zeta, work_root, p) - work_root := mulmod(work_root, accumulating_root, p) - vanishing_denominator := mulmod(vanishing_denominator, addmod(zeta, work_root, p), p) - work_root := mulmod(work_root, accumulating_root, p) - vanishing_denominator := mulmod(vanishing_denominator, addmod(zeta, work_root, p), p) - vanishing_denominator := - mulmod(vanishing_denominator, addmod(zeta, mulmod(work_root, accumulating_root, p), p), p) - - work_root := mload(OMEGA_LOC) - - let lagrange_numerator := mulmod(vanishing_numerator, domain_inverse, p) - let l_start_denominator := addmod(zeta, sub(p, 1), p) - - accumulating_root := mulmod(work_root, work_root, p) - - let l_end_denominator := - addmod( - mulmod(mulmod(mulmod(accumulating_root, accumulating_root, p), work_root, p), zeta, p), sub(p, 1), p - ) - - /** - * Compute inversions using Montgomery's batch inversion trick - */ - let accumulator := mload(DELTA_DENOMINATOR_LOC) - let t0 := accumulator - accumulator := mulmod(accumulator, vanishing_denominator, p) - let t1 := accumulator - accumulator := mulmod(accumulator, vanishing_numerator, p) - let t2 := accumulator - accumulator := mulmod(accumulator, l_start_denominator, p) - let t3 := accumulator - accumulator := mulmod(accumulator, mload(PLOOKUP_DELTA_DENOMINATOR_LOC), p) - let t4 := accumulator - { - mstore(0, 0x20) - mstore(0x20, 0x20) - mstore(0x40, 0x20) - mstore(0x60, mulmod(accumulator, l_end_denominator, p)) - mstore(0x80, sub(p, 2)) - mstore(0xa0, p) - if iszero(staticcall(gas(), 0x05, 0x00, 0xc0, 0x00, 0x20)) { - mstore(0x0, MOD_EXP_FAILURE_SELECTOR) - revert(0x00, 0x04) - } - accumulator 
:= mload(0x00) - } - - t4 := mulmod(accumulator, t4, p) - accumulator := mulmod(accumulator, l_end_denominator, p) - - t3 := mulmod(accumulator, t3, p) - accumulator := mulmod(accumulator, mload(PLOOKUP_DELTA_DENOMINATOR_LOC), p) - - t2 := mulmod(accumulator, t2, p) - accumulator := mulmod(accumulator, l_start_denominator, p) - - t1 := mulmod(accumulator, t1, p) - accumulator := mulmod(accumulator, vanishing_numerator, p) - - t0 := mulmod(accumulator, t0, p) - accumulator := mulmod(accumulator, vanishing_denominator, p) - - accumulator := mulmod(mulmod(accumulator, accumulator, p), mload(DELTA_DENOMINATOR_LOC), p) - - mstore(PUBLIC_INPUT_DELTA_LOC, mulmod(mload(DELTA_NUMERATOR_LOC), accumulator, p)) - mstore(ZERO_POLY_LOC, mulmod(vanishing_numerator, t0, p)) - mstore(ZERO_POLY_INVERSE_LOC, mulmod(vanishing_denominator, t1, p)) - mstore(L_START_LOC, mulmod(lagrange_numerator, t2, p)) - mstore(PLOOKUP_DELTA_LOC, mulmod(mload(PLOOKUP_DELTA_NUMERATOR_LOC), t3, p)) - mstore(L_END_LOC, mulmod(lagrange_numerator, t4, p)) - } - - /** - * UltraPlonk Widget Ordering: - * - * 1. Permutation widget - * 2. Plookup widget - * 3. Arithmetic widget - * 4. Fixed base widget (?) - * 5. GenPermSort widget - * 6. Elliptic widget - * 7. 
Auxiliary widget - */ - - /** - * COMPUTE PERMUTATION WIDGET EVALUATION - */ - { - let alpha := mload(C_ALPHA_LOC) - let beta := mload(C_BETA_LOC) - let gamma := mload(C_GAMMA_LOC) - - /** - * t1 = (W1 + gamma + beta * ID1) * (W2 + gamma + beta * ID2) - * t2 = (W3 + gamma + beta * ID3) * (W4 + gamma + beta * ID4) - * result = alpha_base * z_eval * t1 * t2 - * t1 = (W1 + gamma + beta * sigma_1_eval) * (W2 + gamma + beta * sigma_2_eval) - * t2 = (W2 + gamma + beta * sigma_3_eval) * (W3 + gamma + beta * sigma_4_eval) - * result -= (alpha_base * z_omega_eval * t1 * t2) - */ - let t1 := - mulmod( - add(add(mload(W1_EVAL_LOC), gamma), mulmod(beta, mload(ID1_EVAL_LOC), p)), - add(add(mload(W2_EVAL_LOC), gamma), mulmod(beta, mload(ID2_EVAL_LOC), p)), - p - ) - let t2 := - mulmod( - add(add(mload(W3_EVAL_LOC), gamma), mulmod(beta, mload(ID3_EVAL_LOC), p)), - add(add(mload(W4_EVAL_LOC), gamma), mulmod(beta, mload(ID4_EVAL_LOC), p)), - p - ) - let result := mulmod(mload(C_ALPHA_BASE_LOC), mulmod(mload(Z_EVAL_LOC), mulmod(t1, t2, p), p), p) - t1 := - mulmod( - add(add(mload(W1_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA1_EVAL_LOC), p)), - add(add(mload(W2_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA2_EVAL_LOC), p)), - p - ) - t2 := - mulmod( - add(add(mload(W3_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA3_EVAL_LOC), p)), - add(add(mload(W4_EVAL_LOC), gamma), mulmod(beta, mload(SIGMA4_EVAL_LOC), p)), - p - ) - result := - addmod( - result, - sub(p, mulmod(mload(C_ALPHA_BASE_LOC), mulmod(mload(Z_OMEGA_EVAL_LOC), mulmod(t1, t2, p), p), p)), - p - ) - - /** - * alpha_base *= alpha - * result += alpha_base . (L_{n-k}(ʓ) . (z(ʓ.ω) - ∆_{PI})) - * alpha_base *= alpha - * result += alpha_base . 
(L_1(ʓ)(Z(ʓ) - 1)) - * alpha_Base *= alpha - */ - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p)) - result := - addmod( - result, - mulmod( - mload(C_ALPHA_BASE_LOC), - mulmod( - mload(L_END_LOC), - addmod(mload(Z_OMEGA_EVAL_LOC), sub(p, mload(PUBLIC_INPUT_DELTA_LOC)), p), - p - ), - p - ), - p - ) - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p)) - mstore( - PERMUTATION_IDENTITY, - addmod( - result, - mulmod( - mload(C_ALPHA_BASE_LOC), - mulmod(mload(L_START_LOC), addmod(mload(Z_EVAL_LOC), sub(p, 1), p), p), - p - ), - p - ) - ) - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p)) - } - - /** - * COMPUTE PLOOKUP WIDGET EVALUATION - */ - { - /** - * Goal: f = (w1(z) + q2.w1(zω)) + η(w2(z) + qm.w2(zω)) + η²(w3(z) + qc.w_3(zω)) + q3(z).η³ - * f = η.q3(z) - * f += (w3(z) + qc.w_3(zω)) - * f *= η - * f += (w2(z) + qm.w2(zω)) - * f *= η - * f += (w1(z) + q2.w1(zω)) - */ - let f := mulmod(mload(C_ETA_LOC), mload(Q3_EVAL_LOC), p) - f := - addmod(f, addmod(mload(W3_EVAL_LOC), mulmod(mload(QC_EVAL_LOC), mload(W3_OMEGA_EVAL_LOC), p), p), p) - f := mulmod(f, mload(C_ETA_LOC), p) - f := - addmod(f, addmod(mload(W2_EVAL_LOC), mulmod(mload(QM_EVAL_LOC), mload(W2_OMEGA_EVAL_LOC), p), p), p) - f := mulmod(f, mload(C_ETA_LOC), p) - f := - addmod(f, addmod(mload(W1_EVAL_LOC), mulmod(mload(Q2_EVAL_LOC), mload(W1_OMEGA_EVAL_LOC), p), p), p) - - // t(z) = table4(z).η³ + table3(z).η² + table2(z).η + table1(z) - let t := - addmod( - addmod( - addmod( - mulmod(mload(TABLE4_EVAL_LOC), mload(C_ETA_CUBE_LOC), p), - mulmod(mload(TABLE3_EVAL_LOC), mload(C_ETA_SQR_LOC), p), - p - ), - mulmod(mload(TABLE2_EVAL_LOC), mload(C_ETA_LOC), p), - p - ), - mload(TABLE1_EVAL_LOC), - p - ) - - // t(zw) = table4(zw).η³ + table3(zw).η² + table2(zw).η + table1(zw) - let t_omega := - addmod( - addmod( - addmod( - mulmod(mload(TABLE4_OMEGA_EVAL_LOC), mload(C_ETA_CUBE_LOC), p), - mulmod(mload(TABLE3_OMEGA_EVAL_LOC), 
mload(C_ETA_SQR_LOC), p), - p - ), - mulmod(mload(TABLE2_OMEGA_EVAL_LOC), mload(C_ETA_LOC), p), - p - ), - mload(TABLE1_OMEGA_EVAL_LOC), - p - ) - - /** - * Goal: numerator = (TABLE_TYPE_EVAL * f(z) + γ) * (t(z) + βt(zω) + γ(β + 1)) * (β + 1) - * gamma_beta_constant = γ(β + 1) - * numerator = f * TABLE_TYPE_EVAL + gamma - * temp0 = t(z) + t(zω) * β + gamma_beta_constant - * numerator *= temp0 - * numerator *= (β + 1) - * temp0 = alpha * l_1 - * numerator += temp0 - * numerator *= z_lookup(z) - * numerator -= temp0 - */ - let gamma_beta_constant := mulmod(mload(C_GAMMA_LOC), addmod(mload(C_BETA_LOC), 1, p), p) - let numerator := addmod(mulmod(f, mload(TABLE_TYPE_EVAL_LOC), p), mload(C_GAMMA_LOC), p) - let temp0 := addmod(addmod(t, mulmod(t_omega, mload(C_BETA_LOC), p), p), gamma_beta_constant, p) - numerator := mulmod(numerator, temp0, p) - numerator := mulmod(numerator, addmod(mload(C_BETA_LOC), 1, p), p) - temp0 := mulmod(mload(C_ALPHA_LOC), mload(L_START_LOC), p) - numerator := addmod(numerator, temp0, p) - numerator := mulmod(numerator, mload(Z_LOOKUP_EVAL_LOC), p) - numerator := addmod(numerator, sub(p, temp0), p) - - /** - * Goal: denominator = z_lookup(zω)*[s(z) + βs(zω) + γ(1 + β)] - [z_lookup(zω) - [γ(1 + β)]^{n-k}]*α²L_end(z) - * note: delta_factor = [γ(1 + β)]^{n-k} - * denominator = s(z) + βs(zω) + γ(β + 1) - * temp1 = α²L_end(z) - * denominator -= temp1 - * denominator *= z_lookup(zω) - * denominator += temp1 * delta_factor - * PLOOKUP_IDENTITY = (numerator - denominator).alpha_base - * alpha_base *= alpha^3 - */ - let denominator := - addmod( - addmod(mload(S_EVAL_LOC), mulmod(mload(S_OMEGA_EVAL_LOC), mload(C_BETA_LOC), p), p), - gamma_beta_constant, - p - ) - let temp1 := mulmod(mload(C_ALPHA_SQR_LOC), mload(L_END_LOC), p) - denominator := addmod(denominator, sub(p, temp1), p) - denominator := mulmod(denominator, mload(Z_LOOKUP_OMEGA_EVAL_LOC), p) - denominator := addmod(denominator, mulmod(temp1, mload(PLOOKUP_DELTA_LOC), p), p) - - 
mstore(PLOOKUP_IDENTITY, mulmod(addmod(numerator, sub(p, denominator), p), mload(C_ALPHA_BASE_LOC), p)) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_CUBE_LOC), p)) - } - - /** - * COMPUTE ARITHMETIC WIDGET EVALUATION - */ - { - /** - * The basic arithmetic gate identity in standard plonk is as follows. - * (w_1 . w_2 . q_m) + (w_1 . q_1) + (w_2 . q_2) + (w_3 . q_3) + (w_4 . q_4) + q_c = 0 - * However, for Ultraplonk, we extend this to support "passing" wires between rows (shown without alpha scaling below): - * q_arith * ( ( (-1/2) * (q_arith - 3) * q_m * w_1 * w_2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c ) + - * (q_arith - 1)*( α * (q_arith - 2) * (w_1 + w_4 - w_1_omega + q_m) + w_4_omega) ) = 0 - * - * This formula results in several cases depending on q_arith: - * 1. q_arith == 0: Arithmetic gate is completely disabled - * - * 2. q_arith == 1: Everything in the minigate on the right is disabled. The equation is just a standard plonk equation - * with extra wires: q_m * w_1 * w_2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c = 0 - * - * 3. q_arith == 2: The (w_1 + w_4 - ...) term is disabled. THe equation is: - * (1/2) * q_m * w_1 * w_2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + w_4_omega = 0 - * It allows defining w_4 at next index (w_4_omega) in terms of current wire values - * - * 4. q_arith == 3: The product of w_1 and w_2 is disabled, but a mini addition gate is enabled. α allows us to split - * the equation into two: - * - * q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + 2 * w_4_omega = 0 - * and - * w_1 + w_4 - w_1_omega + q_m = 0 (we are reusing q_m here) - * - * 5. q_arith > 3: The product of w_1 and w_2 is scaled by (q_arith - 3), while the w_4_omega term is scaled by (q_arith - 1). 
- * The equation can be split into two: - * - * (q_arith - 3)* q_m * w_1 * w_ 2 + q_1 * w_1 + q_2 * w_2 + q_3 * w_3 + q_4 * w_4 + q_c + (q_arith - 1) * w_4_omega = 0 - * and - * w_1 + w_4 - w_1_omega + q_m = 0 - * - * The problem that q_m is used both in both equations can be dealt with by appropriately changing selector values at - * the next gate. Then we can treat (q_arith - 1) as a simulated q_6 selector and scale q_m to handle (q_arith - 3) at - * product. - */ - - let w1q1 := mulmod(mload(W1_EVAL_LOC), mload(Q1_EVAL_LOC), p) - let w2q2 := mulmod(mload(W2_EVAL_LOC), mload(Q2_EVAL_LOC), p) - let w3q3 := mulmod(mload(W3_EVAL_LOC), mload(Q3_EVAL_LOC), p) - let w4q3 := mulmod(mload(W4_EVAL_LOC), mload(Q4_EVAL_LOC), p) - - // @todo - Add a explicit test that hits QARITH == 3 - // w1w2qm := (w_1 . w_2 . q_m . (QARITH_EVAL_LOC - 3)) / 2 - let w1w2qm := - mulmod( - mulmod( - mulmod(mulmod(mload(W1_EVAL_LOC), mload(W2_EVAL_LOC), p), mload(QM_EVAL_LOC), p), - addmod(mload(QARITH_EVAL_LOC), sub(p, 3), p), - p - ), - NEGATIVE_INVERSE_OF_2_MODULO_P, - p - ) - - // (w_1 . w_2 . q_m . (q_arith - 3)) / -2) + (w_1 . q_1) + (w_2 . q_2) + (w_3 . q_3) + (w_4 . 
q_4) + q_c - let identity := - addmod( - mload(QC_EVAL_LOC), addmod(w4q3, addmod(w3q3, addmod(w2q2, addmod(w1q1, w1w2qm, p), p), p), p), p - ) - - // if q_arith == 3 we evaluate an additional mini addition gate (on top of the regular one), where: - // w_1 + w_4 - w_1_omega + q_m = 0 - // we use this gate to save an addition gate when adding or subtracting non-native field elements - // α * (q_arith - 2) * (w_1 + w_4 - w_1_omega + q_m) - let extra_small_addition_gate_identity := - mulmod( - mload(C_ALPHA_LOC), - mulmod( - addmod(mload(QARITH_EVAL_LOC), sub(p, 2), p), - addmod( - mload(QM_EVAL_LOC), - addmod( - sub(p, mload(W1_OMEGA_EVAL_LOC)), addmod(mload(W1_EVAL_LOC), mload(W4_EVAL_LOC), p), p - ), - p - ), - p - ), - p - ) - - // if q_arith == 2 OR q_arith == 3 we add the 4th wire of the NEXT gate into the arithmetic identity - // N.B. if q_arith > 2, this wire value will be scaled by (q_arith - 1) relative to the other gate wires! - // alpha_base * q_arith * (identity + (q_arith - 1) * (w_4_omega + extra_small_addition_gate_identity)) - mstore( - ARITHMETIC_IDENTITY, - mulmod( - mload(C_ALPHA_BASE_LOC), - mulmod( - mload(QARITH_EVAL_LOC), - addmod( - identity, - mulmod( - addmod(mload(QARITH_EVAL_LOC), sub(p, 1), p), - addmod(mload(W4_OMEGA_EVAL_LOC), extra_small_addition_gate_identity, p), - p - ), - p - ), - p - ), - p - ) - ) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_SQR_LOC), p)) - } - - /** - * COMPUTE GENPERMSORT WIDGET EVALUATION - */ - { - /** - * D1 = (w2 - w1) - * D2 = (w3 - w2) - * D3 = (w4 - w3) - * D4 = (w1_omega - w4) - * - * α_a = alpha_base - * α_b = alpha_base * α - * α_c = alpha_base * α^2 - * α_d = alpha_base * α^3 - * - * range_accumulator = ( - * D1(D1 - 1)(D1 - 2)(D1 - 3).α_a + - * D2(D2 - 1)(D2 - 2)(D2 - 3).α_b + - * D3(D3 - 1)(D3 - 2)(D3 - 3).α_c + - * D4(D4 - 1)(D4 - 2)(D4 - 3).α_d + - * ) . 
q_sort - */ - let minus_two := sub(p, 2) - let minus_three := sub(p, 3) - let d1 := addmod(mload(W2_EVAL_LOC), sub(p, mload(W1_EVAL_LOC)), p) - let d2 := addmod(mload(W3_EVAL_LOC), sub(p, mload(W2_EVAL_LOC)), p) - let d3 := addmod(mload(W4_EVAL_LOC), sub(p, mload(W3_EVAL_LOC)), p) - let d4 := addmod(mload(W1_OMEGA_EVAL_LOC), sub(p, mload(W4_EVAL_LOC)), p) - - let range_accumulator := - mulmod( - mulmod( - mulmod(addmod(mulmod(d1, d1, p), sub(p, d1), p), addmod(d1, minus_two, p), p), - addmod(d1, minus_three, p), - p - ), - mload(C_ALPHA_BASE_LOC), - p - ) - range_accumulator := - addmod( - range_accumulator, - mulmod( - mulmod( - mulmod(addmod(mulmod(d2, d2, p), sub(p, d2), p), addmod(d2, minus_two, p), p), - addmod(d2, minus_three, p), - p - ), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p), - p - ), - p - ) - range_accumulator := - addmod( - range_accumulator, - mulmod( - mulmod( - mulmod(addmod(mulmod(d3, d3, p), sub(p, d3), p), addmod(d3, minus_two, p), p), - addmod(d3, minus_three, p), - p - ), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_SQR_LOC), p), - p - ), - p - ) - range_accumulator := - addmod( - range_accumulator, - mulmod( - mulmod( - mulmod(addmod(mulmod(d4, d4, p), sub(p, d4), p), addmod(d4, minus_two, p), p), - addmod(d4, minus_three, p), - p - ), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_CUBE_LOC), p), - p - ), - p - ) - range_accumulator := mulmod(range_accumulator, mload(QSORT_EVAL_LOC), p) - - mstore(SORT_IDENTITY, range_accumulator) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_QUAD_LOC), p)) - } - - /** - * COMPUTE ELLIPTIC WIDGET EVALUATION - */ - { - /** - * endo_term = (-x_2) * x_1 * (x_3 * 2 + x_1) * q_beta - * endo_sqr_term = x_2^2 - * endo_sqr_term *= (x_3 - x_1) - * endo_sqr_term *= q_beta^2 - * leftovers = x_2^2 - * leftovers *= x_2 - * leftovers += x_1^2 * (x_3 + x_1) @follow-up Invalid comment in BB widget - * leftovers -= (y_2^2 + y_1^2) - * sign_term = y_2 * y_1 - * 
sign_term += sign_term - * sign_term *= q_sign - */ - // q_elliptic * (x3 + x2 + x1)(x2 - x1)(x2 - x1) - y2^2 - y1^2 + 2(y2y1)*q_sign = 0 - let x_diff := addmod(mload(X2_EVAL_LOC), sub(p, mload(X1_EVAL_LOC)), p) - let y2_sqr := mulmod(mload(Y2_EVAL_LOC), mload(Y2_EVAL_LOC), p) - let y1_sqr := mulmod(mload(Y1_EVAL_LOC), mload(Y1_EVAL_LOC), p) - let y1y2 := mulmod(mulmod(mload(Y1_EVAL_LOC), mload(Y2_EVAL_LOC), p), mload(QSIGN_LOC), p) - - let x_add_identity := - addmod( - mulmod( - addmod(mload(X3_EVAL_LOC), addmod(mload(X2_EVAL_LOC), mload(X1_EVAL_LOC), p), p), - mulmod(x_diff, x_diff, p), - p - ), - addmod( - sub( - p, - addmod(y2_sqr, y1_sqr, p) - ), - addmod(y1y2, y1y2, p), - p - ), - p - ) - x_add_identity := - mulmod( - mulmod( - x_add_identity, - addmod( - 1, - sub(p, mload(QM_EVAL_LOC)), - p - ), - p - ), - mload(C_ALPHA_BASE_LOC), - p - ) - - // q_elliptic * (x3 + x2 + x1)(x2 - x1)(x2 - x1) - y2^2 - y1^2 + 2(y2y1)*q_sign = 0 - let y1_plus_y3 := addmod( - mload(Y1_EVAL_LOC), - mload(Y3_EVAL_LOC), - p - ) - let y_diff := addmod(mulmod(mload(Y2_EVAL_LOC), mload(QSIGN_LOC), p), sub(p, mload(Y1_EVAL_LOC)), p) - let y_add_identity := - addmod( - mulmod(y1_plus_y3, x_diff, p), - mulmod(addmod(mload(X3_EVAL_LOC), sub(p, mload(X1_EVAL_LOC)), p), y_diff, p), - p - ) - y_add_identity := - mulmod( - mulmod(y_add_identity, addmod(1, sub(p, mload(QM_EVAL_LOC)), p), p), - mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p), - p - ) - - // ELLIPTIC_IDENTITY = (x_identity + y_identity) * Q_ELLIPTIC_EVAL - mstore( - ELLIPTIC_IDENTITY, mulmod(addmod(x_add_identity, y_add_identity, p), mload(QELLIPTIC_EVAL_LOC), p) - ) - } - { - /** - * x_pow_4 = (y_1_sqr - curve_b) * x_1; - * y_1_sqr_mul_4 = y_1_sqr + y_1_sqr; - * y_1_sqr_mul_4 += y_1_sqr_mul_4; - * x_1_pow_4_mul_9 = x_pow_4; - * x_1_pow_4_mul_9 += x_1_pow_4_mul_9; - * x_1_pow_4_mul_9 += x_1_pow_4_mul_9; - * x_1_pow_4_mul_9 += x_1_pow_4_mul_9; - * x_1_pow_4_mul_9 += x_pow_4; - * x_1_sqr_mul_3 = x_1_sqr + x_1_sqr + x_1_sqr; 
- * x_double_identity = (x_3 + x_1 + x_1) * y_1_sqr_mul_4 - x_1_pow_4_mul_9; - * y_double_identity = x_1_sqr_mul_3 * (x_1 - x_3) - (y_1 + y_1) * (y_1 + y_3); - */ - // (x3 + x1 + x1) (4y1*y1) - 9 * x1 * x1 * x1 * x1 = 0 - let x1_sqr := mulmod(mload(X1_EVAL_LOC), mload(X1_EVAL_LOC), p) - let y1_sqr := mulmod(mload(Y1_EVAL_LOC), mload(Y1_EVAL_LOC), p) - let x_pow_4 := mulmod(addmod(y1_sqr, GRUMPKIN_CURVE_B_PARAMETER_NEGATED, p), mload(X1_EVAL_LOC), p) - let y1_sqr_mul_4 := mulmod(y1_sqr, 4, p) - let x1_pow_4_mul_9 := mulmod(x_pow_4, 9, p) - let x1_sqr_mul_3 := mulmod(x1_sqr, 3, p) - let x_double_identity := - addmod( - mulmod( - addmod(mload(X3_EVAL_LOC), addmod(mload(X1_EVAL_LOC), mload(X1_EVAL_LOC), p), p), - y1_sqr_mul_4, - p - ), - sub(p, x1_pow_4_mul_9), - p - ) - // (y1 + y1) (2y1) - (3 * x1 * x1)(x1 - x3) = 0 - let y_double_identity := - addmod( - mulmod(x1_sqr_mul_3, addmod(mload(X1_EVAL_LOC), sub(p, mload(X3_EVAL_LOC)), p), p), - sub( - p, - mulmod( - addmod(mload(Y1_EVAL_LOC), mload(Y1_EVAL_LOC), p), - addmod(mload(Y1_EVAL_LOC), mload(Y3_EVAL_LOC), p), - p - ) - ), - p - ) - x_double_identity := mulmod(x_double_identity, mload(C_ALPHA_BASE_LOC), p) - y_double_identity := - mulmod(y_double_identity, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_LOC), p), p) - x_double_identity := mulmod(x_double_identity, mload(QM_EVAL_LOC), p) - y_double_identity := mulmod(y_double_identity, mload(QM_EVAL_LOC), p) - // ELLIPTIC_IDENTITY += (x_double_identity + y_double_identity) * Q_DOUBLE_EVAL - mstore( - ELLIPTIC_IDENTITY, - addmod( - mload(ELLIPTIC_IDENTITY), - mulmod(addmod(x_double_identity, y_double_identity, p), mload(QELLIPTIC_EVAL_LOC), p), - p - ) - ) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_QUAD_LOC), p)) - } - - /** - * COMPUTE AUXILIARY WIDGET EVALUATION - */ - { - { - /** - * Non native field arithmetic gate 2 - * _ _ - * / _ _ _ 14 \ - * q_2 . q_4 | (w_1 . w_2) + (w_1 . w_2) + (w_1 . w_4 + w_2 . w_3 - w_3) . 
2 - w_3 - w_4 | - * \_ _/ - * - * limb_subproduct = w_1 . w_2_omega + w_1_omega . w_2 - * non_native_field_gate_2 = w_1 * w_4 + w_4 * w_3 - w_3_omega - * non_native_field_gate_2 = non_native_field_gate_2 * limb_size - * non_native_field_gate_2 -= w_4_omega - * non_native_field_gate_2 += limb_subproduct - * non_native_field_gate_2 *= q_4 - * limb_subproduct *= limb_size - * limb_subproduct += w_1_omega * w_2_omega - * non_native_field_gate_1 = (limb_subproduct + w_3 + w_4) * q_3 - * non_native_field_gate_3 = (limb_subproduct + w_4 - (w_3_omega + w_4_omega)) * q_m - * non_native_field_identity = (non_native_field_gate_1 + non_native_field_gate_2 + non_native_field_gate_3) * q_2 - */ - - let limb_subproduct := - addmod( - mulmod(mload(W1_EVAL_LOC), mload(W2_OMEGA_EVAL_LOC), p), - mulmod(mload(W1_OMEGA_EVAL_LOC), mload(W2_EVAL_LOC), p), - p - ) - - let non_native_field_gate_2 := - addmod( - addmod( - mulmod(mload(W1_EVAL_LOC), mload(W4_EVAL_LOC), p), - mulmod(mload(W2_EVAL_LOC), mload(W3_EVAL_LOC), p), - p - ), - sub(p, mload(W3_OMEGA_EVAL_LOC)), - p - ) - non_native_field_gate_2 := mulmod(non_native_field_gate_2, LIMB_SIZE, p) - non_native_field_gate_2 := addmod(non_native_field_gate_2, sub(p, mload(W4_OMEGA_EVAL_LOC)), p) - non_native_field_gate_2 := addmod(non_native_field_gate_2, limb_subproduct, p) - non_native_field_gate_2 := mulmod(non_native_field_gate_2, mload(Q4_EVAL_LOC), p) - limb_subproduct := mulmod(limb_subproduct, LIMB_SIZE, p) - limb_subproduct := - addmod(limb_subproduct, mulmod(mload(W1_OMEGA_EVAL_LOC), mload(W2_OMEGA_EVAL_LOC), p), p) - let non_native_field_gate_1 := - mulmod( - addmod(limb_subproduct, sub(p, addmod(mload(W3_EVAL_LOC), mload(W4_EVAL_LOC), p)), p), - mload(Q3_EVAL_LOC), - p - ) - let non_native_field_gate_3 := - mulmod( - addmod( - addmod(limb_subproduct, mload(W4_EVAL_LOC), p), - sub(p, addmod(mload(W3_OMEGA_EVAL_LOC), mload(W4_OMEGA_EVAL_LOC), p)), - p - ), - mload(QM_EVAL_LOC), - p - ) - let non_native_field_identity := - mulmod( 
- addmod(addmod(non_native_field_gate_1, non_native_field_gate_2, p), non_native_field_gate_3, p), - mload(Q2_EVAL_LOC), - p - ) - - mstore(AUX_NON_NATIVE_FIELD_EVALUATION, non_native_field_identity) - } - - { - /** - * limb_accumulator_1 = w_2_omega; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_1_omega; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_3; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_2; - * limb_accumulator_1 *= SUBLIMB_SHIFT; - * limb_accumulator_1 += w_1; - * limb_accumulator_1 -= w_4; - * limb_accumulator_1 *= q_4; - */ - let limb_accumulator_1 := mulmod(mload(W2_OMEGA_EVAL_LOC), SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W1_OMEGA_EVAL_LOC), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W3_EVAL_LOC), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W2_EVAL_LOC), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, SUBLIMB_SHIFT, p) - limb_accumulator_1 := addmod(limb_accumulator_1, mload(W1_EVAL_LOC), p) - limb_accumulator_1 := addmod(limb_accumulator_1, sub(p, mload(W4_EVAL_LOC)), p) - limb_accumulator_1 := mulmod(limb_accumulator_1, mload(Q4_EVAL_LOC), p) - - /** - * limb_accumulator_2 = w_3_omega; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_2_omega; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_1_omega; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_4; - * limb_accumulator_2 *= SUBLIMB_SHIFT; - * limb_accumulator_2 += w_3; - * limb_accumulator_2 -= w_4_omega; - * limb_accumulator_2 *= q_m; - */ - let limb_accumulator_2 := mulmod(mload(W3_OMEGA_EVAL_LOC), SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W2_OMEGA_EVAL_LOC), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, SUBLIMB_SHIFT, 
p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W1_OMEGA_EVAL_LOC), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W4_EVAL_LOC), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, SUBLIMB_SHIFT, p) - limb_accumulator_2 := addmod(limb_accumulator_2, mload(W3_EVAL_LOC), p) - limb_accumulator_2 := addmod(limb_accumulator_2, sub(p, mload(W4_OMEGA_EVAL_LOC)), p) - limb_accumulator_2 := mulmod(limb_accumulator_2, mload(QM_EVAL_LOC), p) - - mstore( - AUX_LIMB_ACCUMULATOR_EVALUATION, - mulmod(addmod(limb_accumulator_1, limb_accumulator_2, p), mload(Q3_EVAL_LOC), p) - ) - } - - { - /** - * memory_record_check = w_3; - * memory_record_check *= eta; - * memory_record_check += w_2; - * memory_record_check *= eta; - * memory_record_check += w_1; - * memory_record_check *= eta; - * memory_record_check += q_c; - * - * partial_record_check = memory_record_check; - * - * memory_record_check -= w_4; - */ - - let memory_record_check := mulmod(mload(W3_EVAL_LOC), mload(C_ETA_LOC), p) - memory_record_check := addmod(memory_record_check, mload(W2_EVAL_LOC), p) - memory_record_check := mulmod(memory_record_check, mload(C_ETA_LOC), p) - memory_record_check := addmod(memory_record_check, mload(W1_EVAL_LOC), p) - memory_record_check := mulmod(memory_record_check, mload(C_ETA_LOC), p) - memory_record_check := addmod(memory_record_check, mload(QC_EVAL_LOC), p) - - let partial_record_check := memory_record_check - memory_record_check := addmod(memory_record_check, sub(p, mload(W4_EVAL_LOC)), p) - - mstore(AUX_MEMORY_EVALUATION, memory_record_check) - - // index_delta = w_1_omega - w_1 - let index_delta := addmod(mload(W1_OMEGA_EVAL_LOC), sub(p, mload(W1_EVAL_LOC)), p) - // record_delta = w_4_omega - w_4 - let record_delta := addmod(mload(W4_OMEGA_EVAL_LOC), sub(p, mload(W4_EVAL_LOC)), p) - // index_is_monotonically_increasing = index_delta * (index_delta - 1) - let index_is_monotonically_increasing 
:= mulmod(index_delta, addmod(index_delta, sub(p, 1), p), p) - - // adjacent_values_match_if_adjacent_indices_match = record_delta * (1 - index_delta) - let adjacent_values_match_if_adjacent_indices_match := - mulmod(record_delta, addmod(1, sub(p, index_delta), p), p) - - // AUX_ROM_CONSISTENCY_EVALUATION = ((adjacent_values_match_if_adjacent_indices_match * alpha) + index_is_monotonically_increasing) * alpha + partial_record_check - mstore( - AUX_ROM_CONSISTENCY_EVALUATION, - addmod( - mulmod( - addmod( - mulmod(adjacent_values_match_if_adjacent_indices_match, mload(C_ALPHA_LOC), p), - index_is_monotonically_increasing, - p - ), - mload(C_ALPHA_LOC), - p - ), - memory_record_check, - p - ) - ) - - { - /** - * next_gate_access_type = w_3_omega; - * next_gate_access_type *= eta; - * next_gate_access_type += w_2_omega; - * next_gate_access_type *= eta; - * next_gate_access_type += w_1_omega; - * next_gate_access_type *= eta; - * next_gate_access_type = w_4_omega - next_gate_access_type; - */ - let next_gate_access_type := mulmod(mload(W3_OMEGA_EVAL_LOC), mload(C_ETA_LOC), p) - next_gate_access_type := addmod(next_gate_access_type, mload(W2_OMEGA_EVAL_LOC), p) - next_gate_access_type := mulmod(next_gate_access_type, mload(C_ETA_LOC), p) - next_gate_access_type := addmod(next_gate_access_type, mload(W1_OMEGA_EVAL_LOC), p) - next_gate_access_type := mulmod(next_gate_access_type, mload(C_ETA_LOC), p) - next_gate_access_type := addmod(mload(W4_OMEGA_EVAL_LOC), sub(p, next_gate_access_type), p) - - // value_delta = w_3_omega - w_3 - let value_delta := addmod(mload(W3_OMEGA_EVAL_LOC), sub(p, mload(W3_EVAL_LOC)), p) - // adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation = (1 - index_delta) * value_delta * (1 - next_gate_access_type); - - let adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation := - mulmod( - addmod(1, sub(p, index_delta), p), - mulmod(value_delta, addmod(1, sub(p, next_gate_access_type), p), 
p), - p - ) - - // AUX_RAM_CONSISTENCY_EVALUATION - - /** - * access_type = w_4 - partial_record_check - * access_check = access_type^2 - access_type - * next_gate_access_type_is_boolean = next_gate_access_type^2 - next_gate_access_type - * RAM_consistency_check_identity = adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation; - * RAM_consistency_check_identity *= alpha; - * RAM_consistency_check_identity += index_is_monotonically_increasing; - * RAM_consistency_check_identity *= alpha; - * RAM_consistency_check_identity += next_gate_access_type_is_boolean; - * RAM_consistency_check_identity *= alpha; - * RAM_consistency_check_identity += access_check; - */ - - let access_type := addmod(mload(W4_EVAL_LOC), sub(p, partial_record_check), p) - let access_check := mulmod(access_type, addmod(access_type, sub(p, 1), p), p) - let next_gate_access_type_is_boolean := - mulmod(next_gate_access_type, addmod(next_gate_access_type, sub(p, 1), p), p) - let RAM_cci := - mulmod( - adjacent_values_match_if_adjacent_indices_match_and_next_access_is_a_read_operation, - mload(C_ALPHA_LOC), - p - ) - RAM_cci := addmod(RAM_cci, index_is_monotonically_increasing, p) - RAM_cci := mulmod(RAM_cci, mload(C_ALPHA_LOC), p) - RAM_cci := addmod(RAM_cci, next_gate_access_type_is_boolean, p) - RAM_cci := mulmod(RAM_cci, mload(C_ALPHA_LOC), p) - RAM_cci := addmod(RAM_cci, access_check, p) - - mstore(AUX_RAM_CONSISTENCY_EVALUATION, RAM_cci) - } - - { - // timestamp_delta = w_2_omega - w_2 - let timestamp_delta := addmod(mload(W2_OMEGA_EVAL_LOC), sub(p, mload(W2_EVAL_LOC)), p) - - // RAM_timestamp_check_identity = (1 - index_delta) * timestamp_delta - w_3 - let RAM_timestamp_check_identity := - addmod( - mulmod(timestamp_delta, addmod(1, sub(p, index_delta), p), p), sub(p, mload(W3_EVAL_LOC)), p - ) - - /** - * memory_identity = ROM_consistency_check_identity * q_2; - * memory_identity += RAM_timestamp_check_identity * q_4; - * memory_identity += memory_record_check * 
q_m; - * memory_identity *= q_1; - * memory_identity += (RAM_consistency_check_identity * q_arith); - * - * auxiliary_identity = memory_identity + non_native_field_identity + limb_accumulator_identity; - * auxiliary_identity *= q_aux; - * auxiliary_identity *= alpha_base; - */ - let memory_identity := mulmod(mload(AUX_ROM_CONSISTENCY_EVALUATION), mload(Q2_EVAL_LOC), p) - memory_identity := - addmod(memory_identity, mulmod(RAM_timestamp_check_identity, mload(Q4_EVAL_LOC), p), p) - memory_identity := - addmod(memory_identity, mulmod(mload(AUX_MEMORY_EVALUATION), mload(QM_EVAL_LOC), p), p) - memory_identity := mulmod(memory_identity, mload(Q1_EVAL_LOC), p) - memory_identity := - addmod( - memory_identity, mulmod(mload(AUX_RAM_CONSISTENCY_EVALUATION), mload(QARITH_EVAL_LOC), p), p - ) - - let auxiliary_identity := addmod(memory_identity, mload(AUX_NON_NATIVE_FIELD_EVALUATION), p) - auxiliary_identity := addmod(auxiliary_identity, mload(AUX_LIMB_ACCUMULATOR_EVALUATION), p) - auxiliary_identity := mulmod(auxiliary_identity, mload(QAUX_EVAL_LOC), p) - auxiliary_identity := mulmod(auxiliary_identity, mload(C_ALPHA_BASE_LOC), p) - - mstore(AUX_IDENTITY, auxiliary_identity) - - // update alpha - mstore(C_ALPHA_BASE_LOC, mulmod(mload(C_ALPHA_BASE_LOC), mload(C_ALPHA_CUBE_LOC), p)) - } - } - } - - { - /** - * quotient = ARITHMETIC_IDENTITY - * quotient += PERMUTATION_IDENTITY - * quotient += PLOOKUP_IDENTITY - * quotient += SORT_IDENTITY - * quotient += ELLIPTIC_IDENTITY - * quotient += AUX_IDENTITY - * quotient *= ZERO_POLY_INVERSE - */ - mstore( - QUOTIENT_EVAL_LOC, - mulmod( - addmod( - addmod( - addmod( - addmod( - addmod(mload(PERMUTATION_IDENTITY), mload(PLOOKUP_IDENTITY), p), - mload(ARITHMETIC_IDENTITY), - p - ), - mload(SORT_IDENTITY), - p - ), - mload(ELLIPTIC_IDENTITY), - p - ), - mload(AUX_IDENTITY), - p - ), - mload(ZERO_POLY_INVERSE_LOC), - p - ) - ) - } - - /** - * GENERATE NU AND SEPARATOR CHALLENGES - */ - { - let current_challenge := mload(C_CURRENT_LOC) - // 
get a calldata pointer that points to the start of the data we want to copy - let calldata_ptr := add(calldataload(0x04), 0x24) - - calldata_ptr := add(calldata_ptr, NU_CALLDATA_SKIP_LENGTH) - - mstore(NU_CHALLENGE_INPUT_LOC_A, current_challenge) - mstore(NU_CHALLENGE_INPUT_LOC_B, mload(QUOTIENT_EVAL_LOC)) - calldatacopy(NU_CHALLENGE_INPUT_LOC_C, calldata_ptr, NU_INPUT_LENGTH) - - // hash length = (0x20 + num field elements), we include the previous challenge in the hash - let challenge := keccak256(NU_CHALLENGE_INPUT_LOC_A, add(NU_INPUT_LENGTH, 0x40)) - - mstore(C_V0_LOC, mod(challenge, p)) - // We need THIRTY-ONE independent nu challenges! - mstore(0x00, challenge) - mstore8(0x20, 0x01) - mstore(C_V1_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x02) - mstore(C_V2_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x03) - mstore(C_V3_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x04) - mstore(C_V4_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x05) - mstore(C_V5_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x06) - mstore(C_V6_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x07) - mstore(C_V7_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x08) - mstore(C_V8_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x09) - mstore(C_V9_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0a) - mstore(C_V10_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0b) - mstore(C_V11_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0c) - mstore(C_V12_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0d) - mstore(C_V13_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0e) - mstore(C_V14_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x0f) - mstore(C_V15_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x10) - mstore(C_V16_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x11) - mstore(C_V17_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x12) - mstore(C_V18_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 
0x13) - mstore(C_V19_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x14) - mstore(C_V20_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x15) - mstore(C_V21_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x16) - mstore(C_V22_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x17) - mstore(C_V23_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x18) - mstore(C_V24_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x19) - mstore(C_V25_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1a) - mstore(C_V26_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1b) - mstore(C_V27_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1c) - mstore(C_V28_LOC, mod(keccak256(0x00, 0x21), p)) - mstore8(0x20, 0x1d) - mstore(C_V29_LOC, mod(keccak256(0x00, 0x21), p)) - - // @follow-up - Why are both v29 and v30 using appending 0x1d to the prior challenge and hashing, should it not change? - mstore8(0x20, 0x1d) - challenge := keccak256(0x00, 0x21) - mstore(C_V30_LOC, mod(challenge, p)) - - // separator - mstore(0x00, challenge) - mstore(0x20, mload(PI_Z_Y_LOC)) - mstore(0x40, mload(PI_Z_X_LOC)) - mstore(0x60, mload(PI_Z_OMEGA_Y_LOC)) - mstore(0x80, mload(PI_Z_OMEGA_X_LOC)) - - mstore(C_U_LOC, mod(keccak256(0x00, 0xa0), p)) - } - - let success := 0 - // VALIDATE T1 - { - let x := mload(T1_X_LOC) - let y := mload(T1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q)) - mstore(ACCUMULATOR_X_LOC, x) - mstore(add(ACCUMULATOR_X_LOC, 0x20), y) - } - // VALIDATE T2 - { - let x := mload(T2_X_LOC) // 0x1400 - let y := mload(T2_Y_LOC) // 0x1420 - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(ZETA_POW_N_LOC)) - // accumulator_2 = [T2].zeta^n - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = [T1] + 
accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE T3 - { - let x := mload(T3_X_LOC) - let y := mload(T3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(mload(ZETA_POW_N_LOC), mload(ZETA_POW_N_LOC), p)) - // accumulator_2 = [T3].zeta^{2n} - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE T4 - { - let x := mload(T4_X_LOC) - let y := mload(T4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(mulmod(mload(ZETA_POW_N_LOC), mload(ZETA_POW_N_LOC), p), mload(ZETA_POW_N_LOC), p)) - // accumulator_2 = [T4].zeta^{3n} - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W1 - { - let x := mload(W1_X_LOC) - let y := mload(W1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V0_LOC), p)) - // accumulator_2 = v0.(u + 1).[W1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W2 - { - let x := mload(W2_X_LOC) - let y := mload(W2_Y_LOC) - let xx := 
mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V1_LOC), p)) - // accumulator_2 = v1.(u + 1).[W2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W3 - { - let x := mload(W3_X_LOC) - let y := mload(W3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V2_LOC), p)) - // accumulator_2 = v2.(u + 1).[W3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE W4 - { - let x := mload(W4_X_LOC) - let y := mload(W4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V3_LOC), p)) - // accumulator_2 = v3.(u + 1).[W4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE S - { - let x := mload(S_X_LOC) - let y := mload(S_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V4_LOC), p)) 
- // accumulator_2 = v4.(u + 1).[S] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Z - { - let x := mload(Z_X_LOC) - let y := mload(Z_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V5_LOC), p)) - // accumulator_2 = v5.(u + 1).[Z] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Z_LOOKUP - { - let x := mload(Z_LOOKUP_X_LOC) - let y := mload(Z_LOOKUP_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V6_LOC), p)) - // accumulator_2 = v6.(u + 1).[Z_LOOKUP] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q1 - { - let x := mload(Q1_X_LOC) - let y := mload(Q1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V7_LOC)) - // accumulator_2 = v7.[Q1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 
0x40)) - - // VALIDATE Q2 - { - let x := mload(Q2_X_LOC) - let y := mload(Q2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V8_LOC)) - // accumulator_2 = v8.[Q2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q3 - { - let x := mload(Q3_X_LOC) - let y := mload(Q3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V9_LOC)) - // accumulator_2 = v9.[Q3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE Q4 - { - let x := mload(Q4_X_LOC) - let y := mload(Q4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V10_LOC)) - // accumulator_2 = v10.[Q4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QM - { - let x := mload(QM_X_LOC) - let y := mload(QM_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V11_LOC)) - // accumulator_2 = v11.[Q;] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, 
ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QC - { - let x := mload(QC_X_LOC) - let y := mload(QC_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V12_LOC)) - // accumulator_2 = v12.[QC] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QARITH - { - let x := mload(QARITH_X_LOC) - let y := mload(QARITH_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V13_LOC)) - // accumulator_2 = v13.[QARITH] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QSORT - { - let x := mload(QSORT_X_LOC) - let y := mload(QSORT_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V14_LOC)) - // accumulator_2 = v14.[QSORT] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QELLIPTIC - { - let x := mload(QELLIPTIC_X_LOC) - let y := mload(QELLIPTIC_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, 
eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V15_LOC)) - // accumulator_2 = v15.[QELLIPTIC] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE QAUX - { - let x := mload(QAUX_X_LOC) - let y := mload(QAUX_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V16_LOC)) - // accumulator_2 = v15.[Q_AUX] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA1 - { - let x := mload(SIGMA1_X_LOC) - let y := mload(SIGMA1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V17_LOC)) - // accumulator_2 = v17.[sigma1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA2 - { - let x := mload(SIGMA2_X_LOC) - let y := mload(SIGMA2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V18_LOC)) - // accumulator_2 = v18.[sigma2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, 
staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA3 - { - let x := mload(SIGMA3_X_LOC) - let y := mload(SIGMA3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V19_LOC)) - // accumulator_2 = v19.[sigma3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE SIGMA4 - { - let x := mload(SIGMA4_X_LOC) - let y := mload(SIGMA4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V20_LOC)) - // accumulator_2 = v20.[sigma4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE1 - { - let x := mload(TABLE1_X_LOC) - let y := mload(TABLE1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V21_LOC), p)) - // accumulator_2 = u.[table1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE2 - { - let x := mload(TABLE2_X_LOC) - let y := mload(TABLE2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, 
q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V22_LOC), p)) - // accumulator_2 = u.[table2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE3 - { - let x := mload(TABLE3_X_LOC) - let y := mload(TABLE3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V23_LOC), p)) - // accumulator_2 = u.[table3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE4 - { - let x := mload(TABLE4_X_LOC) - let y := mload(TABLE4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(addmod(mload(C_U_LOC), 0x1, p), mload(C_V24_LOC), p)) - // accumulator_2 = u.[table4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE TABLE_TYPE - { - let x := mload(TABLE_TYPE_X_LOC) - let y := mload(TABLE_TYPE_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V25_LOC)) - // accumulator_2 = v25.[TableType] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 
0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID1 - { - let x := mload(ID1_X_LOC) - let y := mload(ID1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V26_LOC)) - // accumulator_2 = v26.[ID1] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID2 - { - let x := mload(ID2_X_LOC) - let y := mload(ID2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V27_LOC)) - // accumulator_2 = v27.[ID2] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID3 - { - let x := mload(ID3_X_LOC) - let y := mload(ID3_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V28_LOC)) - // accumulator_2 = v28.[ID3] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE ID4 - { - let x := mload(ID4_X_LOC) - let y := mload(ID4_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - 
mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mload(C_V29_LOC)) - // accumulator_2 = v29.[ID4] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - /** - * COMPUTE BATCH EVALUATION SCALAR MULTIPLIER - */ - { - /** - * batch_evaluation = v0 * (w_1_omega * u + w_1_eval) - * batch_evaluation += v1 * (w_2_omega * u + w_2_eval) - * batch_evaluation += v2 * (w_3_omega * u + w_3_eval) - * batch_evaluation += v3 * (w_4_omega * u + w_4_eval) - * batch_evaluation += v4 * (s_omega_eval * u + s_eval) - * batch_evaluation += v5 * (z_omega_eval * u + z_eval) - * batch_evaluation += v6 * (z_lookup_omega_eval * u + z_lookup_eval) - */ - let batch_evaluation := - mulmod( - mload(C_V0_LOC), - addmod(mulmod(mload(W1_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W1_EVAL_LOC), p), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V1_LOC), - addmod(mulmod(mload(W2_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W2_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V2_LOC), - addmod(mulmod(mload(W3_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W3_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V3_LOC), - addmod(mulmod(mload(W4_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(W4_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V4_LOC), - addmod(mulmod(mload(S_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(S_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V5_LOC), - addmod(mulmod(mload(Z_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(Z_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V6_LOC), - 
addmod(mulmod(mload(Z_LOOKUP_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(Z_LOOKUP_EVAL_LOC), p), - p - ), - p - ) - - /** - * batch_evaluation += v7 * Q1_EVAL - * batch_evaluation += v8 * Q2_EVAL - * batch_evaluation += v9 * Q3_EVAL - * batch_evaluation += v10 * Q4_EVAL - * batch_evaluation += v11 * QM_EVAL - * batch_evaluation += v12 * QC_EVAL - * batch_evaluation += v13 * QARITH_EVAL - * batch_evaluation += v14 * QSORT_EVAL_LOC - * batch_evaluation += v15 * QELLIPTIC_EVAL_LOC - * batch_evaluation += v16 * QAUX_EVAL_LOC - * batch_evaluation += v17 * SIGMA1_EVAL_LOC - * batch_evaluation += v18 * SIGMA2_EVAL_LOC - * batch_evaluation += v19 * SIGMA3_EVAL_LOC - * batch_evaluation += v20 * SIGMA4_EVAL_LOC - */ - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V7_LOC), mload(Q1_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V8_LOC), mload(Q2_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V9_LOC), mload(Q3_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V10_LOC), mload(Q4_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V11_LOC), mload(QM_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V12_LOC), mload(QC_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V13_LOC), mload(QARITH_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V14_LOC), mload(QSORT_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V15_LOC), mload(QELLIPTIC_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V16_LOC), mload(QAUX_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V17_LOC), mload(SIGMA1_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V18_LOC), mload(SIGMA2_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V19_LOC), 
mload(SIGMA3_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V20_LOC), mload(SIGMA4_EVAL_LOC), p), p) - - /** - * batch_evaluation += v21 * (table1(zw) * u + table1(z)) - * batch_evaluation += v22 * (table2(zw) * u + table2(z)) - * batch_evaluation += v23 * (table3(zw) * u + table3(z)) - * batch_evaluation += v24 * (table4(zw) * u + table4(z)) - * batch_evaluation += v25 * table_type_eval - * batch_evaluation += v26 * id1_eval - * batch_evaluation += v27 * id2_eval - * batch_evaluation += v28 * id3_eval - * batch_evaluation += v29 * id4_eval - * batch_evaluation += quotient_eval - */ - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V21_LOC), - addmod(mulmod(mload(TABLE1_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE1_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V22_LOC), - addmod(mulmod(mload(TABLE2_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE2_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V23_LOC), - addmod(mulmod(mload(TABLE3_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE3_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := - addmod( - batch_evaluation, - mulmod( - mload(C_V24_LOC), - addmod(mulmod(mload(TABLE4_OMEGA_EVAL_LOC), mload(C_U_LOC), p), mload(TABLE4_EVAL_LOC), p), - p - ), - p - ) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V25_LOC), mload(TABLE_TYPE_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V26_LOC), mload(ID1_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V27_LOC), mload(ID2_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V28_LOC), mload(ID3_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mulmod(mload(C_V29_LOC), mload(ID4_EVAL_LOC), p), p) - batch_evaluation := addmod(batch_evaluation, mload(QUOTIENT_EVAL_LOC), p) - - mstore(0x00, 
0x01) // [1].x - mstore(0x20, 0x02) // [1].y - mstore(0x40, sub(p, batch_evaluation)) - // accumulator_2 = -[1].(batch_evaluation) - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - mstore(OPENING_COMMITMENT_SUCCESS_FLAG, success) - } - - /** - * PERFORM PAIRING PREAMBLE - */ - { - let u := mload(C_U_LOC) - let zeta := mload(C_ZETA_LOC) - // VALIDATE PI_Z - { - let x := mload(PI_Z_X_LOC) - let y := mload(PI_Z_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q)) - mstore(0x00, x) - mstore(0x20, y) - } - // compute zeta.[PI_Z] and add into accumulator - mstore(0x40, zeta) - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // accumulator = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, ACCUMULATOR_X_LOC, 0x40)) - - // VALIDATE PI_Z_OMEGA - { - let x := mload(PI_Z_OMEGA_X_LOC) - let y := mload(PI_Z_OMEGA_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - mstore(0x40, mulmod(mulmod(u, zeta, p), mload(OMEGA_LOC), p)) - // accumulator_2 = u.zeta.omega.[PI_Z_OMEGA] - success := and(success, staticcall(gas(), 7, 0x00, 0x60, ACCUMULATOR2_X_LOC, 0x40)) - // PAIRING_RHS = accumulator + accumulator_2 - success := and(success, staticcall(gas(), 6, ACCUMULATOR_X_LOC, 0x80, PAIRING_RHS_X_LOC, 0x40)) - - mstore(0x00, mload(PI_Z_X_LOC)) - mstore(0x20, mload(PI_Z_Y_LOC)) - mstore(0x40, mload(PI_Z_OMEGA_X_LOC)) - mstore(0x60, mload(PI_Z_OMEGA_Y_LOC)) - mstore(0x80, u) - success := and(success, staticcall(gas(), 7, 0x40, 0x60, 0x40, 0x40)) - // PAIRING_LHS = [PI_Z] + [PI_Z_OMEGA] * u - success := and(success, 
staticcall(gas(), 6, 0x00, 0x80, PAIRING_LHS_X_LOC, 0x40)) - // negate lhs y-coordinate - mstore(PAIRING_LHS_Y_LOC, sub(q, mload(PAIRING_LHS_Y_LOC))) - - if mload(CONTAINS_RECURSIVE_PROOF_LOC) { - // VALIDATE RECURSIVE P1 - { - let x := mload(RECURSIVE_P1_X_LOC) - let y := mload(RECURSIVE_P1_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - - // compute u.u.[recursive_p1] and write into 0x60 - mstore(0x40, mulmod(u, u, p)) - success := and(success, staticcall(gas(), 7, 0x00, 0x60, 0x60, 0x40)) - // VALIDATE RECURSIVE P2 - { - let x := mload(RECURSIVE_P2_X_LOC) - let y := mload(RECURSIVE_P2_Y_LOC) - let xx := mulmod(x, x, q) - // validate on curve - success := and(success, eq(mulmod(y, y, q), addmod(mulmod(x, xx, q), 3, q))) - mstore(0x00, x) - mstore(0x20, y) - } - // compute u.u.[recursive_p2] and write into 0x00 - // 0x40 still contains u*u - success := and(success, staticcall(gas(), 7, 0x00, 0x60, 0x00, 0x40)) - - // compute u.u.[recursiveP1] + rhs and write into rhs - mstore(0xa0, mload(PAIRING_RHS_X_LOC)) - mstore(0xc0, mload(PAIRING_RHS_Y_LOC)) - success := and(success, staticcall(gas(), 6, 0x60, 0x80, PAIRING_RHS_X_LOC, 0x40)) - - // compute u.u.[recursiveP2] + lhs and write into lhs - mstore(0x40, mload(PAIRING_LHS_X_LOC)) - mstore(0x60, mload(PAIRING_LHS_Y_LOC)) - success := and(success, staticcall(gas(), 6, 0x00, 0x80, PAIRING_LHS_X_LOC, 0x40)) - } - - if iszero(success) { - mstore(0x0, EC_SCALAR_MUL_FAILURE_SELECTOR) - revert(0x00, 0x04) - } - mstore(PAIRING_PREAMBLE_SUCCESS_FLAG, success) - } - - /** - * PERFORM PAIRING - */ - { - // rhs paired with [1]_2 - // lhs paired with [x]_2 - - mstore(0x00, mload(PAIRING_RHS_X_LOC)) - mstore(0x20, mload(PAIRING_RHS_Y_LOC)) - mstore(0x40, 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2) // this is [1]_2 - mstore(0x60, 
0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed) - mstore(0x80, 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b) - mstore(0xa0, 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa) - - mstore(0xc0, mload(PAIRING_LHS_X_LOC)) - mstore(0xe0, mload(PAIRING_LHS_Y_LOC)) - mstore(0x100, mload(G2X_X0_LOC)) - mstore(0x120, mload(G2X_X1_LOC)) - mstore(0x140, mload(G2X_Y0_LOC)) - mstore(0x160, mload(G2X_Y1_LOC)) - - success := staticcall(gas(), 8, 0x00, 0x180, 0x00, 0x20) - mstore(PAIRING_SUCCESS_FLAG, success) - mstore(RESULT_FLAG, mload(0x00)) - } - if iszero( - and( - and(and(mload(PAIRING_SUCCESS_FLAG), mload(RESULT_FLAG)), mload(PAIRING_PREAMBLE_SUCCESS_FLAG)), - mload(OPENING_COMMITMENT_SUCCESS_FLAG) - ) - ) { - mstore(0x0, PROOF_FAILURE_SELECTOR) - revert(0x00, 0x04) - } - { - mstore(0x00, 0x01) - return(0x00, 0x20) // Proof succeeded! - } - } - } -} - -contract UltraVerifier is BaseUltraVerifier { - function getVerificationKeyHash() public pure override(BaseUltraVerifier) returns (bytes32) { - return UltraVerificationKey.verificationKeyHash(); - } - - function loadVerificationKey(uint256 vk, uint256 _omegaInverseLoc) internal pure virtual override(BaseUltraVerifier) { - UltraVerificationKey.loadVerificationKey(vk, _omegaInverseLoc); - } -} diff --git a/tooling/bb_abstraction_leaks/src/lib.rs b/tooling/bb_abstraction_leaks/src/lib.rs index fec53809ad4..56a4f58cd21 100644 --- a/tooling/bb_abstraction_leaks/src/lib.rs +++ b/tooling/bb_abstraction_leaks/src/lib.rs @@ -7,13 +7,6 @@ pub const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; pub const BB_DOWNLOAD_URL: &str = env!("BB_BINARY_URL"); pub const BB_VERSION: &str = env!("BB_VERSION"); -/// Embed the Solidity verifier file -const ULTRA_VERIFIER_CONTRACT: &str = include_str!("contract.sol"); - -pub fn complete_barretenberg_verifier_contract(contract: String) -> String { - format!("{contract}{ULTRA_VERIFIER_CONTRACT}") -} - /// Removes the public 
inputs which are prepended to a proof by Barretenberg. pub fn remove_public_inputs(num_pub_inputs: usize, proof: &[u8]) -> Vec { // Barretenberg prepends the public inputs onto the proof so we need to remove diff --git a/tooling/debugger/Cargo.toml b/tooling/debugger/Cargo.toml index 8476c63060c..785eacf9463 100644 --- a/tooling/debugger/Cargo.toml +++ b/tooling/debugger/Cargo.toml @@ -6,7 +6,8 @@ authors.workspace = true edition.workspace = true license.workspace = true -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[build-dependencies] +build-data.workspace = true [dependencies] acvm.workspace = true @@ -21,4 +22,10 @@ codespan-reporting.workspace = true dap.workspace = true easy-repl = "0.2.1" owo-colors = "3" -serde_json.workspace = true \ No newline at end of file +serde_json.workspace = true + +[dev-dependencies] +assert_cmd = "2.0.12" +rexpect = "0.5.0" +test-binary = "3.0.1" +tempfile.workspace = true diff --git a/tooling/debugger/README.md b/tooling/debugger/README.md index 964784cb730..0ec3b6f0cd4 100644 --- a/tooling/debugger/README.md +++ b/tooling/debugger/README.md @@ -315,25 +315,105 @@ Finished execution Upon quitting the debugger after a solved circuit, the resulting circuit witness gets saved, equivalent to what would happen if we had run the same circuit with `nargo execute`. -## VS Code extension -We're working on extending Noir's official VS Code extension so it uses the editor's debugger facilities to debug Noir programs. +# Testing experimental features -This section will soon show how to load the extension from sources, in order to test the debugger. +There's a number of features that are in active development and that can't yet be merged to the main branch for different reasons. In this section we detail what those features are and how to try them out. 
+## Build from experimental branch at fork -## Variable value inspection (unstable) +Build Nargo by pulling the source version from https://github.com/manastech/noir/tree/dap-with-vars. -To enable the inspection of variable values at runtime from the debugger, we're in the process of instrumenting the compiler to track and collect the necessary mappings between source code level variable names and ACIR/Brillig state. +This will result in a Nargo binary being written to `PROJECT_ROOT/target/debug/nargo`. We will use this path later, so keep it at hand or export it to a an env var. For example: -At the time of writing, there are still some parts of the language that haven't been fully instrumented, which means certain programs will crash when compiled with this. +`export NARGO_EXP=PROJECT_ROOT/target/debug/nargo` -It is however possible to try out this feature, both from the REPL and VS Code, by building Nargo from branch https://github.com/manastech/noir/tree/dap-with-vars. +## About the experimental features -We'll soon expand this section with details on how to do so for the adventurous. +There are currently 2 experimental features in the debugger: +- Variables inspection +- Stacktrace inspection -## Towards debugging contracts +NOTE: Supporting variables inspection requires extensive instrumentation of the compiler, handling all cases of variable creation, types, and value assignment. At the time of writing this README, some cases are still not supported. For example, if your program uses slices or references, this compiler version might panic when trying to compile them, or at some point during the debugger step-by-step execution. This is the main reason why this feature has not yet been merged into master. 
+ +## Trying out REPL experimental features + +To try out these features, go through the same steps as described at the REPL Debugger section above, but instead of using `nargo debug` use `$NARGO_EXP debug` (assuming you exported your custom built Nargo binary to NARGO_EXP). + +When entering `help` on this version, you'll find two new commands: + +``` +... +stacktrace display the current stack trace +... +vars show variable values available at this point + in execution +``` + +Running `vars` will print the current variables in scope, and its current values: + +``` +At /mul_1/src/main.nr:6:5 + 1 // Test unsafe integer multiplication with overflow: 12^8 = 429 981 696 + 2 // The circuit should handle properly the growth of the bit size + 3 fn main(mut x: u32, y: u32, z: u32) { + 4 x *= y; + 5 x *= x; //144 + 6 -> x *= x; //20736 + 7 x *= x; //429 981 696 + 8 assert(x == z); + 9 } +> vars +y:UnsignedInteger { width: 32 }=Field(4), z:UnsignedInteger { width: 32 }=Field(2¹⁶×6561), x:UnsignedInteger { width: 32 }=Field(2⁴×9) +> +``` + +Running `stacktrace` will print information about the current frame in the stacktrace: + +``` +> stacktrace +Frame #0, opcode 12: EXPR [ (1, _5, _5) (-1, _6) 0 ] +At /1_mul/src/main.nr:6:5 + 1 // Test unsafe integer multiplication with overflow: 12^8 = 429 981 696 + 2 // The circuit should handle properly the growth of the bit size + 3 fn main(mut x: u32, y: u32, z: u32) { + 4 x *= y; + 5 x *= x; //144 + 6 -> x *= x; //20736 + 7 x *= x; //429 981 696 + 8 assert(x == z); + 9 } +> +``` + +## Testing the VS Code extension (experimental) + +There is a fork of the official Noir Visual Studio extension which enables the debugger in VS Code. This fork is at: https://github.com/manastech/vscode-noir/tree/dap-support. + +In this section, we'll explain how to test the VS Code Noir debugger combining that extension fork with the experimental features branch discussed above. + +1. 
First, get a copy of the extension source code from https://github.com/manastech/vscode-noir/tree/dap-support. + +2. Package the extension by running `npm run package`. + +3. Open the root folder of the extension on VS Code. + +4. From VS Code, press fn+F5. This will open a new VS Code window with the extension loaded from source. + +5. Go to Code -> Settings -> Extensions -> Noir Language Server. Look for the property `Nargo Path` and enter the path to the experimental build you got as a result of following the steps at [Trying out REPL experimental features](#trying-out-repl-experimental-features). + +6. At the VS Code sidebar, go to the debugger section (see screenshot). Click "Add configuration". Overwrite the `projectFolder` property with the absolute path to the Nargo project you want to debug. + +Screenshot 2023-12-18 at 14 37 38 + +7. Go to a Noir file you want to debug. Navigate again to the debug section of VS Code, and click the "play" icon. + +The debugger should now have started. Current features exposed to the debugger include different kinds of stepping interactions, variable inspection and stacktraces. At the time of writing, Brillig registers and memory are not being exposed, but they will soon be. 
+ +![Screen Recording 2023-12-18 at 14 14 28](https://github.com/manastech/noir/assets/651693/36b4becb-953a-4158-9c5a-7a185673f54f) + +## Towards debugging contracts ### Contracts Runtime diff --git a/tooling/debugger/build.rs b/tooling/debugger/build.rs new file mode 100644 index 00000000000..26a8bc64b0e --- /dev/null +++ b/tooling/debugger/build.rs @@ -0,0 +1,72 @@ +use std::collections::HashSet; +use std::fs::File; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::{env, fs}; + +const GIT_COMMIT: &&str = &"GIT_COMMIT"; + +fn main() { + // Only use build_data if the environment variable isn't set + // The environment variable is always set when working via Nix + if std::env::var(GIT_COMMIT).is_err() { + build_data::set_GIT_COMMIT(); + build_data::set_GIT_DIRTY(); + build_data::no_debug_rebuilds(); + } + + let out_dir = env::var("OUT_DIR").unwrap(); + let destination = Path::new(&out_dir).join("debug.rs"); + let mut test_file = File::create(destination).unwrap(); + + // Try to find the directory that Cargo sets when it is running; otherwise fallback to assuming the CWD + // is the root of the repository and append the crate path + let root_dir = match std::env::var("CARGO_MANIFEST_DIR") { + Ok(dir) => PathBuf::from(dir).parent().unwrap().parent().unwrap().to_path_buf(), + Err(_) => std::env::current_dir().unwrap(), + }; + let test_dir = root_dir.join("test_programs"); + + // Rebuild if the tests have changed + println!("cargo:rerun-if-changed=tests"); + println!("cargo:rerun-if-changed=ignored-tests.txt"); + println!("cargo:rerun-if-changed={}", test_dir.as_os_str().to_str().unwrap()); + + generate_debugger_tests(&mut test_file, &test_dir); +} + +fn generate_debugger_tests(test_file: &mut File, test_data_dir: &Path) { + let test_sub_dir = "execution_success"; + let test_data_dir = test_data_dir.join(test_sub_dir); + + let test_case_dirs = + fs::read_dir(test_data_dir).unwrap().flatten().filter(|c| c.path().is_dir()); + let ignored_tests_contents = 
fs::read_to_string("ignored-tests.txt").unwrap(); + let ignored_tests = ignored_tests_contents.lines().collect::>(); + + for test_dir in test_case_dirs { + let test_name = + test_dir.file_name().into_string().expect("Directory can't be converted to string"); + let ignored = ignored_tests.contains(test_name.as_str()); + if test_name.contains('-') { + panic!( + "Invalid test directory: {test_name}. Cannot include `-`, please convert to `_`" + ); + }; + let test_dir = &test_dir.path(); + + write!( + test_file, + r#" +#[test] +{ignored} +fn debug_{test_name}() {{ + debugger_execution_success("{test_dir}"); +}} + "#, + test_dir = test_dir.display(), + ignored = if ignored { "#[ignore]" } else { "" }, + ) + .expect("Could not write templated test file."); + } +} diff --git a/tooling/debugger/ignored-tests.txt b/tooling/debugger/ignored-tests.txt new file mode 100644 index 00000000000..a9aeb44051f --- /dev/null +++ b/tooling/debugger/ignored-tests.txt @@ -0,0 +1,17 @@ +array_sort +assign_ex +bit_shifts_comptime +brillig_cow +brillig_nested_arrays +brillig_references +brillig_to_bytes_integration +debug_logs +double_verify_proof +modulus +nested_array_dynamic +nested_array_in_slice +nested_arrays_from_brillig +references +signed_comparison +simple_2d_array +to_bytes_integration diff --git a/tooling/debugger/src/context.rs b/tooling/debugger/src/context.rs index 9d3787a8ff7..34e5ead54d8 100644 --- a/tooling/debugger/src/context.rs +++ b/tooling/debugger/src/context.rs @@ -1,3 +1,4 @@ +use crate::foreign_calls::DebugForeignCallExecutor; use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; use acvm::acir::native_types::{Witness, WitnessMap}; use acvm::brillig_vm::{brillig::Value, Registers}; @@ -6,9 +7,8 @@ use acvm::pwg::{ }; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use nargo::artifacts::debug::{DebugArtifact, DebugVars}; +use nargo::artifacts::debug::DebugArtifact; use nargo::errors::{ExecutionError, Location}; -use nargo::ops::ForeignCallExecutor; use 
nargo::NargoError; use noirc_printable_type::{PrintableType, PrintableValue}; @@ -25,9 +25,8 @@ pub(super) enum DebugCommandResult { pub(super) struct DebugContext<'a, B: BlackBoxFunctionSolver> { acvm: ACVM<'a, B>, brillig_solver: Option>, - foreign_call_executor: Box, + foreign_call_executor: Box, debug_artifact: &'a DebugArtifact, - debug_vars: DebugVars, breakpoints: HashSet, } @@ -37,20 +36,13 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { circuit: &'a Circuit, debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, - foreign_call_executor: Box, + foreign_call_executor: Box, ) -> Self { - // TODO: move this into the other context constructor - let mut debug_vars = DebugVars::default(); - debug_artifact.debug_symbols.iter().for_each(|info| { - debug_vars.insert_variables(&info.variables); - debug_vars.insert_types(&info.types); - }); Self { acvm: ACVM::new(blackbox_solver, &circuit.opcodes, initial_witness), brillig_solver: None, foreign_call_executor, debug_artifact, - debug_vars, breakpoints: HashSet::new(), } } @@ -294,8 +286,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } fn handle_foreign_call(&mut self, foreign_call: ForeignCallWaitInfo) -> DebugCommandResult { - let foreign_call_result = - self.foreign_call_executor.execute_with_debug_vars(&foreign_call, &mut self.debug_vars); + let foreign_call_result = self.foreign_call_executor.execute(&foreign_call); match foreign_call_result { Ok(foreign_call_result) => { if let Some(mut solver) = self.brillig_solver.take() { @@ -480,7 +471,7 @@ impl<'a, B: BlackBoxFunctionSolver> DebugContext<'a, B> { } pub(super) fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> { - return self.debug_vars.get_variables(); + return self.foreign_call_executor.get_variables(); } fn breakpoint_reached(&self) -> bool { @@ -543,6 +534,7 @@ mod tests { use super::*; use crate::context::{DebugCommandResult, DebugContext}; + use 
crate::foreign_calls::DefaultDebugForeignCallExecutor; use acvm::{ acir::{ circuit::{ @@ -551,59 +543,20 @@ mod tests { }, native_types::Expression, }, + blackbox_solver::StubbedBlackBoxSolver, brillig_vm::brillig::{ BinaryFieldOp, Opcode as BrilligOpcode, RegisterIndex, RegisterOrMemory, }, }; - use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor}; + use nargo::artifacts::debug::DebugArtifact; use std::collections::BTreeMap; - struct StubbedSolver; - - impl BlackBoxFunctionSolver for StubbedSolver { - fn schnorr_verify( - &self, - _public_key_x: &FieldElement, - _public_key_y: &FieldElement, - _signature: &[u8], - _message: &[u8], - ) -> Result { - unimplemented!(); - } - - fn pedersen_commitment( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result<(FieldElement, FieldElement), acvm::BlackBoxResolutionError> { - unimplemented!(); - } - - fn pedersen_hash( - &self, - _inputs: &[FieldElement], - _domain_separator: u32, - ) -> Result { - unimplemented!(); - } - - fn fixed_base_scalar_mul( - &self, - _low: &FieldElement, - _high: &FieldElement, - ) -> Result<(FieldElement, FieldElement), acvm::BlackBoxResolutionError> { - unimplemented!(); - } - } - #[test] fn test_resolve_foreign_calls_stepping_into_brillig() { let fe_0 = FieldElement::zero(); let fe_1 = FieldElement::one(); let w_x = Witness(1); - let blackbox_solver = &StubbedSolver; - let brillig_opcodes = Brillig { inputs: vec![BrilligInputs::Single(Expression { linear_combinations: vec![(fe_1, w_x)], @@ -635,12 +588,14 @@ mod tests { let initial_witness = BTreeMap::from([(Witness(1), fe_1)]).into(); + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, &debug_artifact)); let mut context = DebugContext::new( - blackbox_solver, + &StubbedBlackBoxSolver, circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + foreign_call_executor, ); assert_eq!(context.get_current_opcode_location(), 
Some(OpcodeLocation::Acir(0))); @@ -683,8 +638,6 @@ mod tests { let w_y = Witness(2); let w_z = Witness(3); - let blackbox_solver = &StubbedSolver; - // This Brillig block is equivalent to: z = x + y let brillig_opcodes = Brillig { inputs: vec![ @@ -713,7 +666,7 @@ mod tests { // z = x + y Opcode::Brillig(brillig_opcodes), // x + y - z = 0 - Opcode::Arithmetic(Expression { + Opcode::AssertZero(Expression { mul_terms: vec![], linear_combinations: vec![(fe_1, w_x), (fe_1, w_y), (-fe_1, w_z)], q_c: fe_0, @@ -729,12 +682,14 @@ mod tests { let initial_witness = BTreeMap::from([(Witness(1), fe_1), (Witness(2), fe_1)]).into(); + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, &debug_artifact)); let mut context = DebugContext::new( - blackbox_solver, + &StubbedBlackBoxSolver, circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + foreign_call_executor, ); // set breakpoint @@ -759,7 +714,6 @@ mod tests { #[test] fn test_offset_opcode_location() { - let blackbox_solver = &StubbedSolver; let opcodes = vec![ Opcode::Brillig(Brillig { inputs: vec![], @@ -774,17 +728,17 @@ mod tests { bytecode: vec![BrilligOpcode::Stop, BrilligOpcode::Stop, BrilligOpcode::Stop], predicate: None, }), - Opcode::Arithmetic(Expression::default()), + Opcode::AssertZero(Expression::default()), ]; let circuit = Circuit { opcodes, ..Circuit::default() }; let debug_artifact = DebugArtifact { debug_symbols: vec![], file_map: BTreeMap::new(), warnings: vec![] }; let context = DebugContext::new( - blackbox_solver, + &StubbedBlackBoxSolver, &circuit, &debug_artifact, WitnessMap::new(), - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultDebugForeignCallExecutor::new(true)), ); assert_eq!(context.offset_opcode_location(&None, 0), (None, 0)); diff --git a/tooling/debugger/src/dap.rs b/tooling/debugger/src/dap.rs index 9256f574835..17ff81c52f0 100644 --- a/tooling/debugger/src/dap.rs +++ 
b/tooling/debugger/src/dap.rs @@ -10,6 +10,7 @@ use codespan_reporting::files::{Files, SimpleFile}; use crate::context::DebugCommandResult; use crate::context::DebugContext; +use crate::foreign_calls::DefaultDebugForeignCallExecutor; use dap::errors::ServerError; use dap::events::StoppedEventBody; @@ -26,7 +27,6 @@ use dap::types::{ StoppedEventReason, Thread, Variable, }; use nargo::artifacts::debug::DebugArtifact; -use nargo::ops::DefaultForeignCallExecutor; use fm::FileId; use noirc_driver::CompiledProgram; @@ -76,7 +76,7 @@ impl<'a, R: Read, W: Write, B: BlackBoxFunctionSolver> DapSession<'a, R, W, B> { circuit, debug_artifact, initial_witness, - Box::new(DefaultForeignCallExecutor::new(true)), + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)), ); Self { server, diff --git a/tooling/debugger/src/foreign_calls.rs b/tooling/debugger/src/foreign_calls.rs new file mode 100644 index 00000000000..4091f73fb69 --- /dev/null +++ b/tooling/debugger/src/foreign_calls.rs @@ -0,0 +1,156 @@ +use acvm::{ + acir::brillig::{ForeignCallParam, ForeignCallResult, Value}, + pwg::ForeignCallWaitInfo, +}; +use nargo::{ + artifacts::debug::{DebugArtifact, DebugVars}, + ops::{DefaultForeignCallExecutor, ForeignCallExecutor}, +}; +use noirc_printable_type::{ForeignCallError, PrintableType, PrintableValue}; + +pub(crate) enum DebugForeignCall { + VarAssign, + VarDrop, + MemberAssign(u32), + DerefAssign, +} + +impl std::fmt::Display for DebugForeignCall { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.name()) + } +} + +impl DebugForeignCall { + pub(crate) fn name(&self) -> &'static str { + match self { + DebugForeignCall::VarAssign => "__debug_var_assign", + DebugForeignCall::VarDrop => "__debug_var_drop", + DebugForeignCall::MemberAssign(1) => "__debug_member_assign_1", + DebugForeignCall::MemberAssign(2) => "__debug_member_assign_2", + DebugForeignCall::MemberAssign(3) => "__debug_member_assign_3", + 
DebugForeignCall::MemberAssign(4) => "__debug_member_assign_4", + DebugForeignCall::MemberAssign(5) => "__debug_member_assign_5", + DebugForeignCall::MemberAssign(6) => "__debug_member_assign_6", + DebugForeignCall::MemberAssign(7) => "__debug_member_assign_7", + DebugForeignCall::MemberAssign(8) => "__debug_member_assign_8", + DebugForeignCall::MemberAssign(_) => panic!("unsupported member assignment arity"), + DebugForeignCall::DerefAssign => "__debug_deref_assign", + } + } + + pub(crate) fn lookup(op_name: &str) -> Option { + let member_pre = "__debug_member_assign_"; + if let Some(op_suffix) = op_name.strip_prefix(member_pre) { + let arity = + op_suffix.parse::().expect("failed to parse debug_member_assign arity"); + return Some(DebugForeignCall::MemberAssign(arity)); + } + match op_name { + "__debug_var_assign" => Some(DebugForeignCall::VarAssign), + "__debug_var_drop" => Some(DebugForeignCall::VarDrop), + "__debug_deref_assign" => Some(DebugForeignCall::DerefAssign), + _ => None, + } + } +} + +pub trait DebugForeignCallExecutor: ForeignCallExecutor { + fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)>; +} + +pub struct DefaultDebugForeignCallExecutor { + executor: DefaultForeignCallExecutor, + pub debug_vars: DebugVars, +} + +impl DefaultDebugForeignCallExecutor { + pub fn new(show_output: bool) -> Self { + Self { + executor: DefaultForeignCallExecutor::new(show_output, None), + debug_vars: DebugVars::default(), + } + } + + pub fn from_artifact(show_output: bool, artifact: &DebugArtifact) -> Self { + let mut ex = Self::new(show_output); + ex.load_artifact(artifact); + ex + } + + pub fn load_artifact(&mut self, artifact: &DebugArtifact) { + artifact.debug_symbols.iter().for_each(|info| { + self.debug_vars.insert_variables(&info.variables); + self.debug_vars.insert_types(&info.types); + }); + } +} + +impl DebugForeignCallExecutor for DefaultDebugForeignCallExecutor { + fn get_variables(&self) -> Vec<(&str, &PrintableValue, &PrintableType)> 
{ + self.debug_vars.get_variables() + } +} + +impl ForeignCallExecutor for DefaultDebugForeignCallExecutor { + fn execute( + &mut self, + foreign_call: &ForeignCallWaitInfo, + ) -> Result { + let foreign_call_name = foreign_call.function.as_str(); + match DebugForeignCall::lookup(foreign_call_name) { + Some(DebugForeignCall::VarAssign) => { + let fcp_var_id = &foreign_call.inputs[0]; + if let ForeignCallParam::Single(var_id_value) = fcp_var_id { + let var_id = var_id_value.to_u128() as u32; + let values: Vec = + foreign_call.inputs[1..].iter().flat_map(|x| x.values()).collect(); + self.debug_vars.assign(var_id, &values); + } + Ok(ForeignCallResult { values: vec![] }) + } + Some(DebugForeignCall::VarDrop) => { + let fcp_var_id = &foreign_call.inputs[0]; + if let ForeignCallParam::Single(var_id_value) = fcp_var_id { + let var_id = var_id_value.to_u128() as u32; + self.debug_vars.drop(var_id); + } + Ok(ForeignCallResult { values: vec![] }) + } + Some(DebugForeignCall::MemberAssign(arity)) => { + if let Some(ForeignCallParam::Single(var_id_value)) = foreign_call.inputs.get(0) { + let arity = arity as usize; + let var_id = var_id_value.to_u128() as u32; + let n = foreign_call.inputs.len(); + let indexes: Vec = foreign_call.inputs[(n - arity)..n] + .iter() + .map(|fcp_v| { + if let ForeignCallParam::Single(v) = fcp_v { + v.to_u128() as u32 + } else { + panic!("expected ForeignCallParam::Single(v)"); + } + }) + .collect(); + let values: Vec = (0..n - 1 - arity) + .flat_map(|i| { + foreign_call.inputs.get(1 + i).map(|fci| fci.values()).unwrap_or(vec![]) + }) + .collect(); + self.debug_vars.assign_field(var_id, indexes, &values); + } + Ok(ForeignCallResult { values: vec![] }) + } + Some(DebugForeignCall::DerefAssign) => { + let fcp_var_id = &foreign_call.inputs[0]; + let fcp_value = &foreign_call.inputs[1]; + if let ForeignCallParam::Single(var_id_value) = fcp_var_id { + let var_id = var_id_value.to_u128() as u32; + self.debug_vars.assign_deref(var_id, &fcp_value.values()); 
+ } + Ok(ForeignCallResult { values: vec![] }) + } + None => self.executor.execute(foreign_call), + } + } +} diff --git a/tooling/debugger/src/lib.rs b/tooling/debugger/src/lib.rs index f4440f57332..4a25e3417a0 100644 --- a/tooling/debugger/src/lib.rs +++ b/tooling/debugger/src/lib.rs @@ -1,7 +1,9 @@ mod context; mod dap; pub mod errors; +mod foreign_calls; mod repl; +mod source_code_printer; use std::io::{Read, Write}; diff --git a/tooling/debugger/src/repl.rs b/tooling/debugger/src/repl.rs index efb1d8de7ab..d354f032ea3 100644 --- a/tooling/debugger/src/repl.rs +++ b/tooling/debugger/src/repl.rs @@ -4,18 +4,14 @@ use acvm::acir::circuit::{Circuit, Opcode, OpcodeLocation}; use acvm::acir::native_types::{Witness, WitnessMap}; use acvm::{BlackBoxFunctionSolver, FieldElement}; -use nargo::{artifacts::debug::DebugArtifact, ops::DefaultForeignCallExecutor, NargoError}; +use crate::foreign_calls::DefaultDebugForeignCallExecutor; +use nargo::{artifacts::debug::DebugArtifact, NargoError}; use easy_repl::{command, CommandStatus, Repl}; use noirc_printable_type::PrintableValueDisplay; use std::cell::RefCell; -use codespan_reporting::files::Files; -use noirc_errors::Location; - -use owo_colors::OwoColorize; - -use std::ops::Range; +use crate::source_code_printer::print_source_code_location; pub struct ReplDebugger<'a, B: BlackBoxFunctionSolver> { context: DebugContext<'a, B>, @@ -33,12 +29,14 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { debug_artifact: &'a DebugArtifact, initial_witness: WitnessMap, ) -> Self { + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, debug_artifact)); let context = DebugContext::new( blackbox_solver, circuit, debug_artifact, initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true)), + foreign_call_executor, ); Self { context, @@ -71,7 +69,8 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ); } } - self.show_source_code_location(&location); + let locations = 
self.context.get_source_location_for_opcode_location(&location); + print_source_code_location(self.debug_artifact, &locations); } } } @@ -92,7 +91,8 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { ); } } - self.show_source_code_location(location); + let locations = self.context.get_source_location_for_opcode_location(location); + print_source_code_location(self.debug_artifact, &locations); } pub fn show_current_call_stack(&self) { @@ -107,71 +107,6 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } } - fn print_location_path(&self, loc: Location) { - let line_number = self.debug_artifact.location_line_number(loc).unwrap(); - let column_number = self.debug_artifact.location_column_number(loc).unwrap(); - - println!( - "At {}:{line_number}:{column_number}", - self.debug_artifact.name(loc.file).unwrap() - ); - } - - fn show_source_code_location(&self, location: &OpcodeLocation) { - let locations = self.context.get_source_location_for_opcode_location(location); - for loc in locations { - self.print_location_path(loc); - - let loc_line_index = self.debug_artifact.location_line_index(loc).unwrap(); - - // How many lines before or after the location's line we print - let context_lines = 5; - - let first_line_to_print = - if loc_line_index < context_lines { 0 } else { loc_line_index - context_lines }; - - let last_line_index = self.debug_artifact.last_line_index(loc).unwrap(); - let last_line_to_print = std::cmp::min(loc_line_index + context_lines, last_line_index); - - let source = self.debug_artifact.location_source_code(loc).unwrap(); - for (current_line_index, line) in source.lines().enumerate() { - let current_line_number = current_line_index + 1; - - if current_line_index < first_line_to_print { - // Ignore lines before range starts - continue; - } else if current_line_index == first_line_to_print && current_line_index > 0 { - // Denote that there's more lines before but we're not showing them - print_line_of_ellipsis(current_line_index); - } 
- - if current_line_index > last_line_to_print { - // Denote that there's more lines after but we're not - // showing them, and stop printing - print_line_of_ellipsis(current_line_number); - break; - } - - if current_line_index == loc_line_index { - // Highlight current location - let Range { start: loc_start, end: mut loc_end } = - self.debug_artifact.location_in_line(loc).unwrap(); - loc_end = loc_end.min(line.len()); - println!( - "{:>3} {:2} {}{}{}", - current_line_number, - "->", - &line[0..loc_start].to_string().dimmed(), - &line[loc_start..loc_end], - &line[loc_end..].to_string().dimmed() - ); - } else { - print_dimmed_line(current_line_number, line); - } - } - } - } - fn display_opcodes(&self) { let opcodes = self.context.get_opcodes(); let current_opcode_location = self.context.get_current_opcode_location(); @@ -318,12 +253,14 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { fn restart_session(&mut self) { let breakpoints: Vec = self.context.iterate_breakpoints().copied().collect(); + let foreign_call_executor = + Box::new(DefaultDebugForeignCallExecutor::from_artifact(true, self.debug_artifact)); self.context = DebugContext::new( self.blackbox_solver, self.circuit, self.debug_artifact, self.initial_witness.clone(), - Box::new(DefaultForeignCallExecutor::new(true)), + foreign_call_executor, ); for opcode_location in breakpoints { self.context.add_breakpoint(opcode_location); @@ -438,14 +375,6 @@ impl<'a, B: BlackBoxFunctionSolver> ReplDebugger<'a, B> { } } -fn print_line_of_ellipsis(line_number: usize) { - println!("{}", format!("{:>3} {}", line_number, "...").dimmed()); -} - -fn print_dimmed_line(line_number: usize, line: &str) { - println!("{}", format!("{:>3} {:2} {}", line_number, "", line).dimmed()); -} - pub fn run( blackbox_solver: &B, circuit: &Circuit, diff --git a/tooling/debugger/src/source_code_printer.rs b/tooling/debugger/src/source_code_printer.rs new file mode 100644 index 00000000000..e430a8f0330 --- /dev/null +++ 
b/tooling/debugger/src/source_code_printer.rs @@ -0,0 +1,310 @@ +use codespan_reporting::files::Files; +use nargo::artifacts::debug::DebugArtifact; +use noirc_errors::Location; +use owo_colors::OwoColorize; +use std::ops::Range; + +#[derive(Debug, PartialEq)] +enum PrintedLine<'a> { + Skip, + Ellipsis { + line_number: usize, + }, + Content { + line_number: usize, + cursor: &'a str, + content: &'a str, + highlight: Option>, + }, +} + +#[derive(Clone, Debug)] +struct LocationPrintContext { + file_lines: Range, + printed_lines: Range, + location_lines: Range, + location_offset_in_first_line: Range, + location_offset_in_last_line: Range, +} + +// Given a DebugArtifact and an OpcodeLocation, prints all the source code +// locations the OpcodeLocation maps to, with some surrounding context and +// visual aids to highlight the location itself. +pub(crate) fn print_source_code_location(debug_artifact: &DebugArtifact, locations: &[Location]) { + let locations = locations.iter(); + + for loc in locations { + print_location_path(debug_artifact, *loc); + + let lines = render_location(debug_artifact, loc); + + for line in lines { + match line { + PrintedLine::Skip => {} + PrintedLine::Ellipsis { line_number } => print_ellipsis(line_number), + PrintedLine::Content { line_number, cursor, content, highlight } => { + print_content(line_number, cursor, content, highlight) + } + } + } + } +} + +fn print_location_path(debug_artifact: &DebugArtifact, loc: Location) { + let line_number = debug_artifact.location_line_number(loc).unwrap(); + let column_number = debug_artifact.location_column_number(loc).unwrap(); + + println!("At {}:{line_number}:{column_number}", debug_artifact.name(loc.file).unwrap()); +} + +fn print_ellipsis(line_number: usize) { + println!("{:>3} {:2} {}", line_number.dimmed(), "", "...".dimmed()); +} + +fn print_content(line_number: usize, cursor: &str, content: &str, highlight: Option>) { + match highlight { + Some(highlight) => { + println!( + "{:>3} {:2} {}{}{}", 
+ line_number, + cursor, + content[0..highlight.start].to_string().dimmed(), + &content[highlight.start..highlight.end], + content[highlight.end..].to_string().dimmed(), + ); + } + None => { + println!( + "{:>3} {:2} {}", + line_number.dimmed(), + cursor.dimmed(), + content.to_string().dimmed(), + ); + } + } +} + +fn render_line( + current: usize, + content: &str, + loc_context: LocationPrintContext, +) -> PrintedLine<'_> { + let file_lines = loc_context.file_lines; + let printed_lines = loc_context.printed_lines; + let location_lines = loc_context.location_lines; + let line_number = current + 1; + + if current < printed_lines.start { + // Ignore lines before the context window we choose to show + PrintedLine::Skip + } else if 0 < current && current == printed_lines.start && current < location_lines.start { + // Denote that there's more lines before but we're not showing them + PrintedLine::Ellipsis { line_number } + } else if current < location_lines.start { + // Print lines before the location start without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == location_lines.start { + // Highlight current location from where it starts to the end of the current line + PrintedLine::Content { + line_number, + cursor: "->", + content, + highlight: Some(loc_context.location_offset_in_first_line), + } + } else if current < location_lines.end { + // Highlight current line if it's contained by the current location + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(Range { start: 0, end: content.len() }), + } + } else if current == location_lines.end { + // Highlight current location from the beginning of the line until the location's own end + PrintedLine::Content { + line_number, + cursor: "", + content, + highlight: Some(loc_context.location_offset_in_last_line), + } + } else if current < printed_lines.end || printed_lines.end == file_lines.end { + // Print lines after the 
location end without highlighting + PrintedLine::Content { line_number, cursor: "", content, highlight: None } + } else if current == printed_lines.end && printed_lines.end < file_lines.end { + // Denote that there's more lines after but we're not showing them + PrintedLine::Ellipsis { line_number } + } else { + PrintedLine::Skip + } +} + +// Given a Location in a DebugArtifact, returns a line iterator that specifies how to +// print the location's file. +// +// Consider for example the file (line numbers added to facilitate this doc): +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 +// ``` +// +// If the location to render is `poseidon::bn254::hash_2(x1)`, we'll render the file as: +// ``` +// 1 use dep::std::hash::poseidon; +// 2 +// 3 fn main(x1: [Field; 2], y1: pub Field, x2: [Field; 4], y2: pub Field) { +// 4 let hash1 = poseidon::bn254::hash_2(x1); +// 5 -> assert(hash1 == y1); +// 6 +// 7 let hash2 = poseidon::bn254::hash_4(x2); +// 8 assert(hash2 == y2); +// 9 } +// 10 ... +// ``` +// +// This is the result of: +// 1. Limiting the amount of printed lines to 5 before and 5 after the location. +// 2. Using ellipsis (...) to denote when some file lines have been left out of the render. +// 3. Using an arrow cursor (->) to denote where the rendered location starts. +// 4. Highlighting the location (here expressed as a block for the sake of the explanation). +// +// Note that locations may span multiple lines, so this function deals with that too. 
+fn render_location<'a>( + debug_artifact: &'a DebugArtifact, + loc: &'a Location, +) -> impl Iterator> { + let loc = *loc; + + let file_lines = Range { start: 0, end: debug_artifact.last_line_index(loc).unwrap() }; + + // Sub-range of file lines that this location spans + let location_lines = Range { + start: debug_artifact.location_line_index(loc).unwrap(), + end: debug_artifact.location_end_line_index(loc).unwrap(), + }; + + // How many lines before or after the location's lines we print + let context_lines = 5; + + // Sub-range of lines that we'll print, which includes location + context lines + let first_line_to_print = + if location_lines.start < context_lines { 0 } else { location_lines.start - context_lines }; + let last_line_to_print = std::cmp::min(location_lines.end + context_lines, file_lines.end); + let printed_lines = Range { start: first_line_to_print, end: last_line_to_print }; + + // Range of the location relative to its starting and ending lines + let location_offset_in_first_line = debug_artifact.location_in_line(loc).unwrap(); + let location_offset_in_last_line = debug_artifact.location_in_end_line(loc).unwrap(); + + let context = LocationPrintContext { + file_lines, + printed_lines, + location_lines, + location_offset_in_first_line, + location_offset_in_last_line, + }; + + let source = debug_artifact.location_source_code(loc).unwrap(); + source + .lines() + .enumerate() + .map(move |(index, content)| render_line(index, content, context.clone())) +} + +#[cfg(test)] +mod tests { + use crate::source_code_printer::render_location; + use crate::source_code_printer::PrintedLine::Content; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use nargo::artifacts::debug::DebugArtifact; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn 
create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + #[test] + fn render_multiple_line_location() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations, (vec![], vec![]))]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_rendered: Vec<_> = render_location(&debug_artifact, &loc).collect(); + + assert_eq!( + location_rendered, + vec![ + Content { + line_number: 1, + cursor: "", + content: "pub fn main(mut state: [Field; 2]) -> [Field; 2] {", + highlight: None, + }, + Content { + line_number: 2, + cursor: "->", + content: " state = permute(", + highlight: Some(Range { start: 12, end: 20 }), + }, + Content { + line_number: 3, + cursor: "", + content: " consts::x5_2_config(),", + highlight: Some(Range { start: 0, end: 30 }), + }, + Content { + line_number: 4, + cursor: "", + content: " state);", + highlight: Some(Range { start: 0, end: 14 }), + }, + Content { line_number: 5, cursor: "", content: "", highlight: None }, + Content { line_number: 6, cursor: "", content: " state", highlight: None }, + Content { line_number: 7, cursor: "", content: "}", 
highlight: None }, + ] + ); + } +} diff --git a/tooling/debugger/tests/debug.rs b/tooling/debugger/tests/debug.rs new file mode 100644 index 00000000000..82872ce2739 --- /dev/null +++ b/tooling/debugger/tests/debug.rs @@ -0,0 +1,55 @@ +#[cfg(test)] +mod tests { + // Some of these imports are consumed by the injected tests + use assert_cmd::cargo::cargo_bin; + + use rexpect::spawn_bash; + + test_binary::build_test_binary_once!(mock_backend, "../backend_interface/test-binaries"); + + // include tests generated by `build.rs` + include!(concat!(env!("OUT_DIR"), "/debug.rs")); + + pub fn debugger_execution_success(test_program_dir: &str) { + let nargo_bin = + cargo_bin("nargo").into_os_string().into_string().expect("Cannot parse nargo path"); + + let mock_backend_path = + path_to_mock_backend().into_string().expect("Cannot parse mock_backend path"); + + let mut dbg_session = spawn_bash(Some(10000)).expect("Could not start bash session"); + + dbg_session + .send_line(&format!("export NARGO_BACKEND_PATH={}", mock_backend_path)) + .expect("Could not export NARGO_BACKEND_PATH."); + dbg_session.wait_for_prompt().expect("Could not export NARGO_BACKEND_PATH."); + + // Start debugger and test that it loads for the given program. + dbg_session + .execute( + &format!("{} debug --program-dir {} --force-brillig", nargo_bin, test_program_dir), + ".*\\Starting debugger.*", + ) + .expect("Could not start debugger"); + + // While running the debugger, issue a "continue" cmd, + // which should run to the program to end given + // we haven't set any breakpoints. + // ">" is the debugger's prompt, so finding one + // after running "continue" indicates that the + // debugger has not panicked until the end of the program. 
+ dbg_session + .send_line("c") + .expect("Debugger panicked while attempting to step through program."); + dbg_session + .exp_string(">") + .expect("Failed while waiting for debugger to step through program."); + + // Run the "quit" command, then check that the debugger confirms + // having successfully solved the circuit witness. + dbg_session.send_line("quit").expect("Failed to quit debugger"); + dbg_session + .exp_regex(".*Circuit witness successfully solved.*") + .expect("Expected circuit witness to be successfully solved."); + } +} diff --git a/tooling/lsp/Cargo.toml b/tooling/lsp/Cargo.toml index 5f5e701da67..750e85694e2 100644 --- a/tooling/lsp/Cargo.toml +++ b/tooling/lsp/Cargo.toml @@ -23,7 +23,10 @@ serde_json.workspace = true tower.workspace = true async-lsp = { workspace = true, features = ["omni-trait"] } serde_with = "3.2.0" +thiserror.workspace = true fm.workspace = true +rayon = "1.8.0" +fxhash.workspace = true [target.'cfg(all(target_arch = "wasm32", not(target_os = "wasi")))'.dependencies] wasm-bindgen.workspace = true diff --git a/tooling/lsp/src/lib.rs b/tooling/lsp/src/lib.rs index 2ad8096a13f..b64fc474b0b 100644 --- a/tooling/lsp/src/lib.rs +++ b/tooling/lsp/src/lib.rs @@ -7,7 +7,7 @@ use std::{ collections::HashMap, future::Future, ops::{self, ControlFlow}, - path::PathBuf, + path::{Path, PathBuf}, pin::Pin, task::{self, Poll}, }; @@ -17,20 +17,32 @@ use async_lsp::{ router::Router, AnyEvent, AnyNotification, AnyRequest, ClientSocket, Error, LspService, ResponseError, }; -use fm::codespan_files as files; +use fm::{codespan_files as files, FileManager}; +use fxhash::FxHashSet; +use lsp_types::CodeLens; +use nargo::{parse_all, workspace::Workspace}; +use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::{file_manager_with_stdlib, prepare_crate, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::{Context, FunctionNameMatch}, + hir::{def_map::parse_file, 
Context, FunctionNameMatch, ParsedFiles}, + node_interner::NodeInterner, + parser::ParserError, + ParsedModule, }; +use rayon::prelude::*; + use notifications::{ on_did_change_configuration, on_did_change_text_document, on_did_close_text_document, on_did_open_text_document, on_did_save_text_document, on_exit, on_initialized, }; use requests::{ - on_formatting, on_goto_definition_request, on_initialize, on_profile_run_request, on_shutdown, + on_code_lens_request, on_formatting, on_goto_declaration_request, on_goto_definition_request, + on_goto_type_definition_request, on_initialize, on_profile_run_request, on_shutdown, on_test_run_request, on_tests_request, }; use serde_json::Value as JsonValue; +use thiserror::Error; use tower::Service; mod notifications; @@ -41,12 +53,24 @@ mod types; use solver::WrapperSolver; use types::{notification, request, NargoTest, NargoTestId, Position, Range, Url}; +#[derive(Debug, Error)] +pub enum LspError { + /// Error while Resolving Workspace. + #[error("Failed to Resolve Workspace - {0}")] + WorkspaceResolutionError(String), +} + // State for the LSP gets implemented on this struct and is internal to the implementation pub struct LspState { root_path: Option, client: ClientSocket, solver: WrapperSolver, + open_documents_count: usize, input_files: HashMap, + cached_lenses: HashMap>, + cached_definitions: HashMap, + cached_parsed_files: HashMap))>, + parsing_cache_enabled: bool, } impl LspState { @@ -56,6 +80,11 @@ impl LspState { root_path: None, solver: WrapperSolver(Box::new(solver)), input_files: HashMap::new(), + cached_lenses: HashMap::new(), + cached_definitions: HashMap::new(), + open_documents_count: 0, + cached_parsed_files: HashMap::new(), + parsing_cache_enabled: true, } } } @@ -72,10 +101,13 @@ impl NargoLspService { .request::(on_initialize) .request::(on_formatting) .request::(on_shutdown) + .request::(on_code_lens_request) .request::(on_tests_request) .request::(on_test_run_request) .request::(on_profile_run_request) 
.request::(on_goto_definition_request) + .request::(on_goto_declaration_request) + .request::(on_goto_type_definition_request) .notification::(on_initialized) .notification::(on_did_change_configuration) .notification::(on_did_open_text_document) @@ -130,10 +162,10 @@ fn get_package_tests_in_crate( .map(|(func_name, test_function)| { let location = context.function_meta(&test_function.get_id()).name.location; let file_id = location.file; - + let file_path = fm.path(file_id).expect("file must exist to contain tests"); let range = byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); - let file_uri = Url::from_file_path(fm.path(file_id)) + let file_uri = Url::from_file_path(file_path) .expect("Expected a valid file path that can be converted into a URI"); NargoTest { @@ -175,3 +207,121 @@ fn byte_span_to_range<'a, F: files::Files<'a> + ?Sized>( None } } + +pub(crate) fn resolve_workspace_for_source_path(file_path: &Path) -> Result { + let package_root = find_file_manifest(file_path); + + let toml_path = package_root.ok_or_else(|| { + LspError::WorkspaceResolutionError(format!( + "Nargo.toml not found for file: {:?}", + file_path + )) + })?; + + let workspace = resolve_workspace_from_toml( + &toml_path, + PackageSelection::All, + Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), + ) + .map_err(|err| LspError::WorkspaceResolutionError(err.to_string()))?; + + Ok(workspace) +} + +/// Prepares a package from a source string +/// This is useful for situations when we don't need dependencies +/// and just need to operate on single file. 
+/// +/// Use case for this is the LSP server and code lenses +/// which operate on single file and need to understand this file +/// in order to offer code lenses to the user +fn prepare_source(source: String, state: &mut LspState) -> (Context<'static, 'static>, CrateId) { + let root = Path::new(""); + let file_name = Path::new("main.nr"); + let mut file_manager = file_manager_with_stdlib(root); + file_manager.add_file_with_source(file_name, source).expect( + "Adding source buffer to file manager should never fail when file manager is empty", + ); + let parsed_files = parse_diff(&file_manager, state); + + let mut context = Context::new(file_manager, parsed_files); + let root_crate_id = prepare_crate(&mut context, file_name); + + (context, root_crate_id) +} + +fn parse_diff(file_manager: &FileManager, state: &mut LspState) -> ParsedFiles { + if state.parsing_cache_enabled { + let noir_file_hashes: Vec<_> = file_manager + .as_file_map() + .all_file_ids() + .par_bridge() + .filter_map(|&file_id| { + let file_path = file_manager.path(file_id).expect("expected file to exist"); + let file_extension = + file_path.extension().expect("expected all file paths to have an extension"); + if file_extension == "nr" { + Some(( + file_id, + file_path.to_path_buf(), + fxhash::hash(file_manager.fetch_file(file_id).expect("file must exist")), + )) + } else { + None + } + }) + .collect(); + + let cache_hits: Vec<_> = noir_file_hashes + .par_iter() + .filter_map(|(file_id, file_path, current_hash)| { + let cached_version = state.cached_parsed_files.get(file_path); + if let Some((hash, cached_parsing)) = cached_version { + if hash == current_hash { + return Some((*file_id, cached_parsing.clone())); + } + } + None + }) + .collect(); + + let cache_hits_ids: FxHashSet<_> = cache_hits.iter().map(|(file_id, _)| *file_id).collect(); + + let cache_misses: Vec<_> = noir_file_hashes + .into_par_iter() + .filter(|(id, _, _)| !cache_hits_ids.contains(id)) + .map(|(file_id, path, hash)| (file_id, 
path, hash, parse_file(file_manager, file_id))) + .collect(); + + cache_misses.iter().for_each(|(_, path, hash, parse_results)| { + state.cached_parsed_files.insert(path.clone(), (*hash, parse_results.clone())); + }); + + cache_misses + .into_iter() + .map(|(id, _, _, parse_results)| (id, parse_results)) + .chain(cache_hits.into_iter()) + .collect() + } else { + parse_all(file_manager) + } +} + +#[test] +fn prepare_package_from_source_string() { + let source = r#" + fn main() { + let x = 1; + let y = 2; + let z = x + y; + } + "#; + + let client = ClientSocket::new_closed(); + let mut state = LspState::new(&client, acvm::blackbox_solver::StubbedBlackBoxSolver); + + let (mut context, crate_id) = crate::prepare_source(source.to_string(), &mut state); + let _check_result = noirc_driver::check_crate(&mut context, crate_id, false, false); + let main_func_id = context.get_main_function(&crate_id); + assert!(main_func_id.is_some()); +} diff --git a/tooling/lsp/src/notifications/mod.rs b/tooling/lsp/src/notifications/mod.rs index 61f0d231738..355bb7832c4 100644 --- a/tooling/lsp/src/notifications/mod.rs +++ b/tooling/lsp/src/notifications/mod.rs @@ -1,19 +1,21 @@ use std::ops::ControlFlow; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use nargo::prepare_package; -use nargo_toml::{find_file_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; +use noirc_driver::{check_crate, file_manager_with_stdlib}; use noirc_errors::{DiagnosticKind, FileDiagnostic}; +use crate::requests::collect_lenses_for_package; use crate::types::{ notification, Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, - DidSaveTextDocumentParams, InitializedParams, LogMessageParams, MessageType, NargoPackageTests, - PublishDiagnosticsParams, + 
DidSaveTextDocumentParams, InitializedParams, NargoPackageTests, PublishDiagnosticsParams, }; -use crate::{byte_span_to_range, get_package_tests_in_crate, LspState}; +use crate::{ + byte_span_to_range, get_package_tests_in_crate, parse_diff, prepare_source, + resolve_workspace_for_source_path, LspState, +}; pub(super) fn on_initialized( _state: &mut LspState, @@ -34,7 +36,16 @@ pub(super) fn on_did_open_text_document( params: DidOpenTextDocumentParams, ) -> ControlFlow> { state.input_files.insert(params.text_document.uri.to_string(), params.text_document.text); - ControlFlow::Continue(()) + + let document_uri = params.text_document.uri; + + match process_noir_document(document_uri, state) { + Ok(_) => { + state.open_documents_count += 1; + ControlFlow::Continue(()) + } + Err(err) => ControlFlow::Break(Err(err)), + } } pub(super) fn on_did_change_text_document( @@ -42,7 +53,38 @@ pub(super) fn on_did_change_text_document( params: DidChangeTextDocumentParams, ) -> ControlFlow> { let text = params.content_changes.into_iter().next().unwrap().text; - state.input_files.insert(params.text_document.uri.to_string(), text); + state.input_files.insert(params.text_document.uri.to_string(), text.clone()); + + let (mut context, crate_id) = prepare_source(text, state); + let _ = check_crate(&mut context, crate_id, false, false); + + let workspace = match resolve_workspace_for_source_path( + params.text_document.uri.to_file_path().unwrap().as_path(), + ) { + Ok(workspace) => workspace, + Err(lsp_error) => { + return ControlFlow::Break(Err(ResponseError::new( + ErrorCode::REQUEST_FAILED, + lsp_error.to_string(), + ) + .into())) + } + }; + let package = match workspace.members.first() { + Some(package) => package, + None => { + return ControlFlow::Break(Err(ResponseError::new( + ErrorCode::REQUEST_FAILED, + "Selected workspace has no members", + ) + .into())) + } + }; + + let lenses = collect_lenses_for_package(&context, crate_id, &workspace, package, None); + + 
state.cached_lenses.insert(params.text_document.uri.to_string(), lenses); + ControlFlow::Continue(()) } @@ -51,6 +93,14 @@ pub(super) fn on_did_close_text_document( params: DidCloseTextDocumentParams, ) -> ControlFlow> { state.input_files.remove(¶ms.text_document.uri.to_string()); + state.cached_lenses.remove(¶ms.text_document.uri.to_string()); + + state.open_documents_count -= 1; + + if state.open_documents_count == 0 { + state.cached_definitions.clear(); + } + ControlFlow::Continue(()) } @@ -58,58 +108,44 @@ pub(super) fn on_did_save_text_document( state: &mut LspState, params: DidSaveTextDocumentParams, ) -> ControlFlow> { - let file_path = match params.text_document.uri.to_file_path() { - Ok(file_path) => file_path, - Err(()) => { - return ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - "URI is not a valid file path", - ) - .into())) - } - }; + let document_uri = params.text_document.uri; - let package_root = find_file_manifest(file_path.as_path()); + match process_noir_document(document_uri, state) { + Ok(_) => ControlFlow::Continue(()), + Err(err) => ControlFlow::Break(Err(err)), + } +} - let toml_path = match package_root { - Some(toml_path) => toml_path, - None => { - // If we cannot find a manifest, we log a warning but return no diagnostics - // We can reconsider this when we can build a file without the need for a Nargo.toml file to resolve deps - let _ = state.client.log_message(LogMessageParams { - typ: MessageType::WARNING, - message: format!("Nargo.toml not found for file: {:}", file_path.display()), - }); - return ControlFlow::Continue(()); - } - }; +fn process_noir_document( + document_uri: lsp_types::Url, + state: &mut LspState, +) -> Result<(), async_lsp::Error> { + let file_path = document_uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; - let workspace = match resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - 
Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) { - Ok(workspace) => workspace, - Err(err) => { - // If we found a manifest, but the workspace is invalid, we raise an error about it - return ControlFlow::Break(Err(ResponseError::new( - ErrorCode::REQUEST_FAILED, - format!("{err}"), - ) - .into())); - } - }; + let workspace = resolve_workspace_for_source_path(&file_path).map_err(|lsp_error| { + ResponseError::new(ErrorCode::REQUEST_FAILED, lsp_error.to_string()) + })?; + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + + let parsed_files = parse_diff(&workspace_file_manager, state); let diagnostics: Vec<_> = workspace .into_iter() .flat_map(|package| -> Vec { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = + prepare_package(&workspace_file_manager, &parsed_files, package); let file_diagnostics = match check_crate(&mut context, crate_id, false, false) { Ok(((), warnings)) => warnings, Err(errors_and_warnings) => errors_and_warnings, }; + let package_root_dir: String = package.root_dir.as_os_str().to_string_lossy().into(); + // We don't add test headings for a package if it contains no `#[test]` functions if let Some(tests) = get_package_tests_in_crate(&context, &crate_id, &package.name) { let _ = state.client.notify::(NargoPackageTests { @@ -118,6 +154,17 @@ pub(super) fn on_did_save_text_document( }); } + let collected_lenses = crate::requests::collect_lenses_for_package( + &context, + crate_id, + &workspace, + package, + Some(&file_path), + ); + state.cached_lenses.insert(document_uri.to_string(), collected_lenses); + + state.cached_definitions.insert(package_root_dir, context.def_interner); + let fm = &context.file_manager; let files = fm.as_file_map(); @@ -126,7 +173,9 @@ pub(super) fn on_did_save_text_document( .filter_map(|FileDiagnostic { file_id, diagnostic, call_stack: _ }| { // Ignore 
diagnostics for any file that wasn't the file we saved // TODO: In the future, we could create "related" diagnostics for these files - if fm.path(file_id) != file_path { + if fm.path(file_id).expect("file must exist to have emitted diagnostic") + != file_path + { return None; } @@ -152,14 +201,13 @@ pub(super) fn on_did_save_text_document( .collect() }) .collect(); - let _ = state.client.publish_diagnostics(PublishDiagnosticsParams { - uri: params.text_document.uri, + uri: document_uri, version: None, diagnostics, }); - ControlFlow::Continue(()) + Ok(()) } pub(super) fn on_exit( diff --git a/tooling/lsp/src/requests/code_lens_request.rs b/tooling/lsp/src/requests/code_lens_request.rs new file mode 100644 index 00000000000..893ba33d845 --- /dev/null +++ b/tooling/lsp/src/requests/code_lens_request.rs @@ -0,0 +1,239 @@ +use std::future::{self, Future}; + +use async_lsp::{ErrorCode, ResponseError}; + +use nargo::{package::Package, workspace::Workspace}; +use noirc_driver::check_crate; +use noirc_frontend::hir::FunctionNameMatch; + +use crate::{ + byte_span_to_range, prepare_source, resolve_workspace_for_source_path, + types::{CodeLens, CodeLensParams, CodeLensResult, Command}, + LspState, +}; + +const ARROW: &str = "▶\u{fe0e}"; +const TEST_COMMAND: &str = "nargo.test"; +const TEST_CODELENS_TITLE: &str = "Run Test"; +const COMPILE_COMMAND: &str = "nargo.compile"; +const COMPILE_CODELENS_TITLE: &str = "Compile"; +const INFO_COMMAND: &str = "nargo.info"; +const INFO_CODELENS_TITLE: &str = "Info"; +const EXECUTE_COMMAND: &str = "nargo.execute"; +const EXECUTE_CODELENS_TITLE: &str = "Execute"; + +const PROFILE_COMMAND: &str = "nargo.profile"; +const PROFILE_CODELENS_TITLE: &str = "Profile"; + +fn with_arrow(title: &str) -> String { + format!("{ARROW} {title}") +} + +fn package_selection_args(workspace: &Workspace, package: &Package) -> Vec { + vec![ + "--program-dir".into(), + workspace.root_dir.display().to_string().into(), + "--package".into(), + 
package.name.to_string().into(), + ] +} + +pub(crate) fn on_code_lens_request( + state: &mut LspState, + params: CodeLensParams, +) -> impl Future> { + future::ready(on_code_lens_request_inner(state, params)) +} + +fn on_code_lens_request_inner( + state: &mut LspState, + params: CodeLensParams, +) -> Result { + let file_path = params.text_document.uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; + + if let Some(collected_lenses) = state.cached_lenses.get(¶ms.text_document.uri.to_string()) { + return Ok(Some(collected_lenses.clone())); + } + + let source_string = std::fs::read_to_string(&file_path).map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not read file from disk") + })?; + + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); + + let (mut context, crate_id) = prepare_source(source_string, state); + // We ignore the warnings and errors produced by compilation for producing code lenses + // because we can still get the test functions even if compilation fails + let _ = check_crate(&mut context, crate_id, false, false); + + let collected_lenses = + collect_lenses_for_package(&context, crate_id, &workspace, package, None); + + if collected_lenses.is_empty() { + state.cached_lenses.remove(¶ms.text_document.uri.to_string()); + Ok(None) + } else { + state + .cached_lenses + .insert(params.text_document.uri.to_string().clone(), collected_lenses.clone()); + Ok(Some(collected_lenses)) + } +} + +pub(crate) fn collect_lenses_for_package( + context: &noirc_frontend::macros_api::HirContext, + crate_id: noirc_frontend::macros_api::CrateId, + workspace: &Workspace, + package: &Package, + file_path: Option<&std::path::PathBuf>, +) -> Vec { + let mut lenses: Vec = vec![]; + let fm = &context.file_manager; + let files = fm.as_file_map(); + let tests = + 
context.get_all_test_functions_in_crate_matching(&crate_id, FunctionNameMatch::Anything); + for (func_name, test_function) in tests { + let location = context.function_meta(&test_function.get_id()).name.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id).expect("file must exist to contain tests") != *file_path { + continue; + } + } + + let range = byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + + let test_command = Command { + title: with_arrow(TEST_CODELENS_TITLE), + command: TEST_COMMAND.into(), + arguments: Some( + [ + package_selection_args(workspace, package), + vec!["--exact".into(), func_name.into()], + ] + .concat(), + ), + }; + + let test_lens = CodeLens { range, command: Some(test_command), data: None }; + + lenses.push(test_lens); + } + + if package.is_binary() { + if let Some(main_func_id) = context.get_main_function(&crate_id) { + let location = context.function_meta(&main_func_id).name.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id).expect("file must exist to contain `main` function") + != *file_path + { + return lenses; + } + } + + let range = + byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + + let compile_command = Command { + title: with_arrow(COMPILE_CODELENS_TITLE), + command: COMPILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; + + lenses.push(compile_lens); + + let info_command = Command { + title: INFO_CODELENS_TITLE.to_string(), + command: INFO_COMMAND.into(), + 
arguments: Some(package_selection_args(workspace, package)), + }; + + let info_lens = CodeLens { range, command: Some(info_command), data: None }; + + lenses.push(info_lens); + + let execute_command = Command { + title: EXECUTE_CODELENS_TITLE.to_string(), + command: EXECUTE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let execute_lens = CodeLens { range, command: Some(execute_command), data: None }; + + lenses.push(execute_lens); + + let profile_command = Command { + title: PROFILE_CODELENS_TITLE.to_string(), + command: PROFILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; + + lenses.push(profile_lens); + } + } + + if package.is_contract() { + // Currently not looking to deduplicate this since we don't have a clear decision on if the Contract stuff is staying + for contract in context.get_all_contracts(&crate_id) { + let location = contract.location; + let file_id = location.file; + + // Ignore diagnostics for any file that wasn't the file we saved + // TODO: In the future, we could create "related" diagnostics for these files + if let Some(file_path) = file_path { + if fm.path(file_id).expect("file must exist to contain a contract") != *file_path { + continue; + } + } + + let range = + byte_span_to_range(files, file_id, location.span.into()).unwrap_or_default(); + + let compile_command = Command { + title: with_arrow(COMPILE_CODELENS_TITLE), + command: COMPILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let compile_lens = CodeLens { range, command: Some(compile_command), data: None }; + + lenses.push(compile_lens); + + let info_command = Command { + title: INFO_CODELENS_TITLE.to_string(), + command: INFO_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let info_lens = CodeLens { range, command: Some(info_command), 
data: None }; + + lenses.push(info_lens); + + let profile_command = Command { + title: PROFILE_CODELENS_TITLE.to_string(), + command: PROFILE_COMMAND.into(), + arguments: Some(package_selection_args(workspace, package)), + }; + + let profile_lens = CodeLens { range, command: Some(profile_command), data: None }; + + lenses.push(profile_lens); + } + } + + lenses +} diff --git a/tooling/lsp/src/requests/goto_declaration.rs b/tooling/lsp/src/requests/goto_declaration.rs new file mode 100644 index 00000000000..8e6d519b895 --- /dev/null +++ b/tooling/lsp/src/requests/goto_declaration.rs @@ -0,0 +1,82 @@ +use std::future::{self, Future}; + +use crate::types::GotoDeclarationResult; +use crate::LspState; +use crate::{parse_diff, resolve_workspace_for_source_path}; +use async_lsp::{ErrorCode, ResponseError}; + +use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse}; + +use nargo::insert_all_files_for_workspace_into_file_manager; +use noirc_driver::file_manager_with_stdlib; + +use super::{position_to_byte_index, to_lsp_location}; + +pub(crate) fn on_goto_declaration_request( + state: &mut LspState, + params: GotoDeclarationParams, +) -> impl Future> { + let result = on_goto_definition_inner(state, params); + future::ready(result) +} + +fn on_goto_definition_inner( + state: &mut LspState, + params: GotoDeclarationParams, +) -> Result { + let file_path = + params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { + ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") + })?; + + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); + + let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = 
parse_diff(&workspace_file_manager, state); + + let (mut context, crate_id) = + nargo::prepare_package(&workspace_file_manager, &parsed_files, package); + + let interner; + if let Some(def_interner) = state.cached_definitions.get(&package_root_path) { + interner = def_interner; + } else { + // We ignore the warnings and errors produced by compilation while resolving the definition + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + interner = &context.def_interner; + } + + let files = context.file_manager.as_file_map(); + let file_id = context.file_manager.name_to_id(file_path.clone()).ok_or(ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not find file in file manager. File path: {:?}", file_path), + ))?; + let byte_index = + position_to_byte_index(files, file_id, ¶ms.text_document_position_params.position) + .map_err(|err| { + ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not convert position to byte index. Error: {:?}", err), + ) + })?; + + let search_for_location = noirc_errors::Location { + file: file_id, + span: noirc_errors::Span::single_char(byte_index as u32), + }; + + let goto_declaration_response = + interner.get_declaration_location_from(search_for_location).and_then(|found_location| { + let file_id = found_location.file; + let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let response: GotoDeclarationResponse = + GotoDeclarationResponse::from(definition_position).to_owned(); + Some(response) + }); + + Ok(goto_declaration_response) +} diff --git a/tooling/lsp/src/requests/goto_definition.rs b/tooling/lsp/src/requests/goto_definition.rs index 558851d4ecf..88bb667f2e8 100644 --- a/tooling/lsp/src/requests/goto_definition.rs +++ b/tooling/lsp/src/requests/goto_definition.rs @@ -1,175 +1,109 @@ use std::future::{self, Future}; +use crate::{parse_diff, resolve_workspace_for_source_path}; use crate::{types::GotoDefinitionResult, LspState}; -use 
async_lsp::{ErrorCode, LanguageClient, ResponseError}; -use fm::codespan_files::Error; -use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse, Location}; -use lsp_types::{Position, Url}; -use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use async_lsp::{ErrorCode, ResponseError}; + +use lsp_types::request::GotoTypeDefinitionParams; +use lsp_types::{GotoDefinitionParams, GotoDefinitionResponse}; +use nargo::insert_all_files_for_workspace_into_file_manager; +use noirc_driver::file_manager_with_stdlib; + +use super::{position_to_byte_index, to_lsp_location}; pub(crate) fn on_goto_definition_request( state: &mut LspState, params: GotoDefinitionParams, ) -> impl Future> { - let result = on_goto_definition_inner(state, params); + let result = on_goto_definition_inner(state, params, false); + future::ready(result) +} + +pub(crate) fn on_goto_type_definition_request( + state: &mut LspState, + params: GotoTypeDefinitionParams, +) -> impl Future> { + let result = on_goto_definition_inner(state, params, true); future::ready(result) } fn on_goto_definition_inner( state: &mut LspState, params: GotoDefinitionParams, + return_type_location_instead: bool, ) -> Result { - let root_path = state.root_path.as_deref().ok_or_else(|| { - ResponseError::new(ErrorCode::REQUEST_FAILED, "Could not find project root") - })?; - let file_path = params.text_document_position_params.text_document.uri.to_file_path().map_err(|_| { ResponseError::new(ErrorCode::REQUEST_FAILED, "URI is not a valid file path") })?; - let toml_path = match find_package_manifest(root_path, &file_path) { - Ok(toml_path) => toml_path, - Err(err) => { - let _ = state.client.log_message(lsp_types::LogMessageParams { - typ: lsp_types::MessageType::WARNING, - message: err.to_string(), - }); - return Ok(None); - } - }; - let workspace = resolve_workspace_from_toml( - &toml_path, - PackageSelection::All, - 
Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), - ) - .map_err(|err| { - // If we found a manifest, but the workspace is invalid, we raise an error about it - ResponseError::new(ErrorCode::REQUEST_FAILED, err) - })?; + let workspace = resolve_workspace_for_source_path(file_path.as_path()).unwrap(); + let package = workspace.members.first().unwrap(); - let mut definition_position = None; + let package_root_path: String = package.root_dir.as_os_str().to_string_lossy().into(); - for package in &workspace { - let (mut context, crate_id) = nargo::prepare_package(package); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_diff(&workspace_file_manager, state); - // We ignore the warnings and errors produced by compilation while resolving the definition - let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); - - let files = context.file_manager.as_file_map(); - let file_id = context.file_manager.name_to_id(file_path.clone()); - - if let Some(file_id) = file_id { - let byte_index = position_to_byte_index( - files, - file_id, - ¶ms.text_document_position_params.position, - ); - - if let Ok(byte_index) = byte_index { - let search_for_location = noirc_errors::Location { - file: file_id, - span: noirc_errors::Span::single_char(byte_index as u32), - }; - let found_location = context.get_definition_location_from(search_for_location); - - if let Some(found_location) = found_location { - let file_id = found_location.file; - definition_position = to_lsp_location(files, file_id, found_location.span); - } - } - } - } + let (mut context, crate_id) = + nargo::prepare_package(&workspace_file_manager, &parsed_files, package); - if let Some(definition_position) = definition_position { - let response: GotoDefinitionResponse = - GotoDefinitionResponse::from(definition_position).to_owned(); - Ok(Some(response)) + let 
interner; + if let Some(def_interner) = state.cached_definitions.get(&package_root_path) { + interner = def_interner; } else { - Ok(None) - } -} - -fn to_lsp_location<'a, F>( - files: &'a F, - file_id: F::FileId, - definition_span: noirc_errors::Span, -) -> Option -where - F: fm::codespan_files::Files<'a> + ?Sized, -{ - let range = crate::byte_span_to_range(files, file_id, definition_span.into())?; - let file_name = files.name(file_id).ok()?; - - let path = file_name.to_string(); - let uri = Url::from_file_path(path).ok()?; - - Some(Location { uri, range }) -} - -pub(crate) fn position_to_byte_index<'a, F>( - files: &'a F, - file_id: F::FileId, - position: &Position, -) -> Result -where - F: fm::codespan_files::Files<'a> + ?Sized, -{ - let source = files.source(file_id)?; - let source = source.as_ref(); - - let line_span = files.line_range(file_id, position.line as usize)?; - - let line_str = source.get(line_span.clone()); - - if let Some(line_str) = line_str { - let byte_offset = character_to_line_offset(line_str, position.character)?; - Ok(line_span.start + byte_offset) - } else { - Err(Error::InvalidCharBoundary { given: position.line as usize }) + // We ignore the warnings and errors produced by compilation while resolving the definition + let _ = noirc_driver::check_crate(&mut context, crate_id, false, false); + interner = &context.def_interner; } -} -fn character_to_line_offset(line: &str, character: u32) -> Result { - let line_len = line.len(); - let mut character_offset = 0; - - let mut chars = line.chars(); - while let Some(ch) = chars.next() { - if character_offset == character { - let chars_off = chars.as_str().len(); - let ch_off = ch.len_utf8(); - - return Ok(line_len - chars_off - ch_off); - } - - character_offset += ch.len_utf16() as u32; - } + let files = context.file_manager.as_file_map(); + let file_id = context.file_manager.name_to_id(file_path.clone()).ok_or(ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not find file in file 
manager. File path: {:?}", file_path), + ))?; + let byte_index = + position_to_byte_index(files, file_id, ¶ms.text_document_position_params.position) + .map_err(|err| { + ResponseError::new( + ErrorCode::REQUEST_FAILED, + format!("Could not convert position to byte index. Error: {:?}", err), + ) + })?; + + let search_for_location = noirc_errors::Location { + file: file_id, + span: noirc_errors::Span::single_char(byte_index as u32), + }; - // Handle positions after the last character on the line - if character_offset == character { - Ok(line_len) - } else { - Err(Error::ColumnTooLarge { given: character_offset as usize, max: line.len() }) - } + let goto_definition_response = interner + .get_definition_location_from(search_for_location, return_type_location_instead) + .and_then(|found_location| { + let file_id = found_location.file; + let definition_position = to_lsp_location(files, file_id, found_location.span)?; + let response: GotoDefinitionResponse = + GotoDefinitionResponse::from(definition_position).to_owned(); + Some(response) + }); + + Ok(goto_definition_response) } #[cfg(test)] mod goto_definition_tests { + use acvm::blackbox_solver::StubbedBlackBoxSolver; use async_lsp::ClientSocket; + use lsp_types::{Position, Url}; use tokio::test; - use crate::solver::MockBackend; - use super::*; #[test] async fn test_on_goto_definition() { let client = ClientSocket::new_closed(); - let solver = MockBackend; - let mut state = LspState::new(&client, solver); + let mut state = LspState::new(&client, StubbedBlackBoxSolver); let root_path = std::env::current_dir() .unwrap() diff --git a/tooling/lsp/src/requests/mod.rs b/tooling/lsp/src/requests/mod.rs index e2fdcdf08da..ec56cf5045a 100644 --- a/tooling/lsp/src/requests/mod.rs +++ b/tooling/lsp/src/requests/mod.rs @@ -1,9 +1,14 @@ use std::future::Future; -use crate::types::InitializeParams; +use crate::types::{CodeLensOptions, InitializeParams}; use async_lsp::ResponseError; -use lsp_types::{Position, 
TextDocumentSyncCapability, TextDocumentSyncKind}; +use fm::codespan_files::Error; +use lsp_types::{ + DeclarationCapability, Location, Position, TextDocumentSyncCapability, TextDocumentSyncKind, + TypeDefinitionProviderCapability, Url, +}; use nargo_fmt::Config; +use serde::{Deserialize, Serialize}; use crate::{ types::{InitializeResult, NargoCapability, NargoTestsOptions, ServerCapabilities}, @@ -20,25 +25,70 @@ use crate::{ // They are not attached to the `NargoLspService` struct so they can be unit tested with only `LspState` // and params passed in. +mod code_lens_request; +mod goto_declaration; mod goto_definition; mod profile_run; mod test_run; mod tests; pub(crate) use { - goto_definition::on_goto_definition_request, profile_run::on_profile_run_request, + code_lens_request::collect_lenses_for_package, code_lens_request::on_code_lens_request, + goto_declaration::on_goto_declaration_request, goto_definition::on_goto_definition_request, + goto_definition::on_goto_type_definition_request, profile_run::on_profile_run_request, test_run::on_test_run_request, tests::on_tests_request, }; +/// LSP client will send initialization request after the server has started. +/// [InitializeParams].`initialization_options` will contain the options sent from the client. +#[derive(Debug, Deserialize, Serialize)] +struct LspInitializationOptions { + /// Controls whether code lens is enabled by the server + /// By default this will be set to true (enabled). 
+ #[serde(rename = "enableCodeLens", default = "default_enable_code_lens")] + enable_code_lens: bool, + + #[serde(rename = "enableParsingCache", default = "default_enable_parsing_cache")] + enable_parsing_cache: bool, +} + +fn default_enable_code_lens() -> bool { + true +} + +fn default_enable_parsing_cache() -> bool { + true +} + +impl Default for LspInitializationOptions { + fn default() -> Self { + Self { + enable_code_lens: default_enable_code_lens(), + enable_parsing_cache: default_enable_parsing_cache(), + } + } +} + pub(crate) fn on_initialize( state: &mut LspState, params: InitializeParams, ) -> impl Future> { state.root_path = params.root_uri.and_then(|root_uri| root_uri.to_file_path().ok()); + let initialization_options: LspInitializationOptions = params + .initialization_options + .and_then(|value| serde_json::from_value(value).ok()) + .unwrap_or_default(); + state.parsing_cache_enabled = initialization_options.enable_parsing_cache; - async { + async move { let text_document_sync = TextDocumentSyncCapability::Kind(TextDocumentSyncKind::FULL); + let code_lens = if initialization_options.enable_code_lens { + Some(CodeLensOptions { resolve_provider: Some(false) }) + } else { + None + }; + let nargo = NargoCapability { tests: Some(NargoTestsOptions { fetch: Some(true), @@ -50,9 +100,12 @@ pub(crate) fn on_initialize( Ok(InitializeResult { capabilities: ServerCapabilities { text_document_sync: Some(text_document_sync), + code_lens_provider: code_lens, document_formatting_provider: true, nargo: Some(nargo), definition_provider: Some(lsp_types::OneOf::Left(true)), + declaration_provider: Some(DeclarationCapability::Simple(true)), + type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)), }, server_info: None, }) @@ -95,6 +148,70 @@ fn on_formatting_inner( } } +pub(crate) fn position_to_byte_index<'a, F>( + files: &'a F, + file_id: F::FileId, + position: &Position, +) -> Result +where + F: fm::codespan_files::Files<'a> + ?Sized, +{ + let 
source = files.source(file_id)?; + let source = source.as_ref(); + + let line_span = files.line_range(file_id, position.line as usize)?; + + let line_str = source.get(line_span.clone()); + + if let Some(line_str) = line_str { + let byte_offset = character_to_line_offset(line_str, position.character)?; + Ok(line_span.start + byte_offset) + } else { + Err(Error::InvalidCharBoundary { given: position.line as usize }) + } +} + +fn character_to_line_offset(line: &str, character: u32) -> Result { + let line_len = line.len(); + let mut character_offset = 0; + + let mut chars = line.chars(); + while let Some(ch) = chars.next() { + if character_offset == character { + let chars_off = chars.as_str().len(); + let ch_off = ch.len_utf8(); + + return Ok(line_len - chars_off - ch_off); + } + + character_offset += ch.len_utf16() as u32; + } + + // Handle positions after the last character on the line + if character_offset == character { + Ok(line_len) + } else { + Err(Error::ColumnTooLarge { given: character_offset as usize, max: line.len() }) + } +} + +fn to_lsp_location<'a, F>( + files: &'a F, + file_id: F::FileId, + definition_span: noirc_errors::Span, +) -> Option +where + F: fm::codespan_files::Files<'a> + ?Sized, +{ + let range = crate::byte_span_to_range(files, file_id, definition_span.into())?; + let file_name = files.name(file_id).ok()?; + + let path = file_name.to_string(); + let uri = Url::from_file_path(path).ok()?; + + Some(Location { uri, range }) +} + pub(crate) fn on_shutdown( _state: &mut LspState, _params: (), @@ -104,19 +221,19 @@ pub(crate) fn on_shutdown( #[cfg(test)] mod initialization { + use acvm::blackbox_solver::StubbedBlackBoxSolver; use async_lsp::ClientSocket; - use lsp_types::{InitializeParams, TextDocumentSyncCapability, TextDocumentSyncKind}; + use lsp_types::{ + CodeLensOptions, InitializeParams, TextDocumentSyncCapability, TextDocumentSyncKind, + }; use tokio::test; - use crate::{ - requests::on_initialize, solver::MockBackend, 
types::ServerCapabilities, LspState, - }; + use crate::{requests::on_initialize, types::ServerCapabilities, LspState}; #[test] async fn test_on_initialize() { let client = ClientSocket::new_closed(); - let solver = MockBackend; - let mut state = LspState::new(&client, solver); + let mut state = LspState::new(&client, StubbedBlackBoxSolver); let params = InitializeParams::default(); let response = on_initialize(&mut state, params).await.unwrap(); assert!(matches!( @@ -125,6 +242,7 @@ mod initialization { text_document_sync: Some(TextDocumentSyncCapability::Kind( TextDocumentSyncKind::FULL )), + code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(false) }), document_formatting_provider: true, .. } @@ -132,3 +250,25 @@ mod initialization { assert!(response.server_info.is_none()); } } + +#[cfg(test)] +mod character_to_line_offset_tests { + use super::*; + + #[test] + fn test_character_to_line_offset() { + let line = "Hello, dark!"; + let character = 8; + + let result = character_to_line_offset(line, character).unwrap(); + assert_eq!(result, 8); + + // In the case of a multi-byte character, the offset should be the byte index of the character + // byte offset for 8 character (黑) is expected to be 10 + let line = "Hello, 黑!"; + let character = 8; + + let result = character_to_line_offset(line, character).unwrap(); + assert_eq!(result, 10); + } +} diff --git a/tooling/lsp/src/requests/profile_run.rs b/tooling/lsp/src/requests/profile_run.rs index 84888d30ba5..8ba91338f55 100644 --- a/tooling/lsp/src/requests/profile_run.rs +++ b/tooling/lsp/src/requests/profile_run.rs @@ -3,14 +3,17 @@ use std::{ future::{self, Future}, }; -use acvm::{acir::circuit::Opcode, Language}; +use acvm::ExpressionWidth; use async_lsp::{ErrorCode, ResponseError}; -use nargo::artifacts::debug::DebugArtifact; +use nargo::{artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, 
PackageSelection}; -use noirc_driver::{CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, DebugFile, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_errors::{debug_info::OpCodesCount, Location}; use crate::{ + parse_diff, types::{NargoProfileRunParams, NargoProfileRunResult}, LspState, }; @@ -24,7 +27,7 @@ pub(crate) fn on_profile_run_request( } fn on_profile_run_request_inner( - state: &LspState, + state: &mut LspState, params: NargoProfileRunParams, ) -> Result { let root_path = state.root_path.as_deref().ok_or_else(|| { @@ -48,25 +51,20 @@ fn on_profile_run_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_diff(&workspace_file_manager, state); + // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(_package) => { - let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace - .into_iter() - .filter(|package| !package.is_library()) - .cloned() - .partition(|package| package.is_binary()); - - // # TODO(#3504): Consider how to incorporate Backend relevant information in wider context. 
- let is_opcode_supported = |_opcode: &Opcode| true; - let np_language = Language::PLONKCSat { width: 3 }; + let expression_width = ExpressionWidth::Bounded { width: 3 }; let (compiled_programs, compiled_contracts) = nargo::ops::compile_workspace( + &workspace_file_manager, + &parsed_files, &workspace, - &binary_packages, - &contract_packages, - np_language, - is_opcode_supported, + expression_width, &CompileOptions::default(), ) .map_err(|err| ResponseError::new(ErrorCode::REQUEST_FAILED, err))?; diff --git a/tooling/lsp/src/requests/test_run.rs b/tooling/lsp/src/requests/test_run.rs index 07d5b6635fd..259b5c5779e 100644 --- a/tooling/lsp/src/requests/test_run.rs +++ b/tooling/lsp/src/requests/test_run.rs @@ -2,11 +2,14 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, ResponseError}; use nargo::{ + insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, - prepare_package, + parse_all, prepare_package, }; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + check_crate, file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::hir::FunctionNameMatch; use crate::{ @@ -47,10 +50,15 @@ fn on_test_run_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + // Since we filtered on crate name, this should be the only item in the iterator match workspace.into_iter().next() { Some(package) => { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = + prepare_package(&workspace_file_manager, &parsed_files, package); if check_crate(&mut context, crate_id, false, false).is_err() { let 
result = NargoTestRunResult { id: params.id.clone(), @@ -77,6 +85,7 @@ fn on_test_run_request_inner( &mut context, test_function, false, + None, &CompileOptions::default(), ); let result = match test_result { diff --git a/tooling/lsp/src/requests/tests.rs b/tooling/lsp/src/requests/tests.rs index 9a67eaae6db..5b78fcc65c3 100644 --- a/tooling/lsp/src/requests/tests.rs +++ b/tooling/lsp/src/requests/tests.rs @@ -2,12 +2,12 @@ use std::future::{self, Future}; use async_lsp::{ErrorCode, LanguageClient, ResponseError}; use lsp_types::{LogMessageParams, MessageType}; -use nargo::prepare_package; +use nargo::{insert_all_files_for_workspace_into_file_manager, prepare_package}; use nargo_toml::{find_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{check_crate, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{check_crate, file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; use crate::{ - get_package_tests_in_crate, + get_package_tests_in_crate, parse_diff, types::{NargoPackageTests, NargoTestsParams, NargoTestsResult}, LspState, }; @@ -50,10 +50,15 @@ fn on_tests_request_inner( ResponseError::new(ErrorCode::REQUEST_FAILED, err) })?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_diff(&workspace_file_manager, state); + let package_tests: Vec<_> = workspace .into_iter() .filter_map(|package| { - let (mut context, crate_id) = prepare_package(package); + let (mut context, crate_id) = + prepare_package(&workspace_file_manager, &parsed_files, package); // We ignore the warnings and errors produced by compilation for producing tests // because we can still get the test functions even if compilation fails let _ = check_crate(&mut context, crate_id, false, false); diff --git a/tooling/lsp/src/solver.rs b/tooling/lsp/src/solver.rs index 090f71d63b4..6217b7ad71f 100644 --- 
a/tooling/lsp/src/solver.rs +++ b/tooling/lsp/src/solver.rs @@ -39,46 +39,22 @@ impl BlackBoxFunctionSolver for WrapperSolver { ) -> Result { self.0.pedersen_hash(inputs, domain_separator) } -} - -// We also have a mocked implementation of the `BlackBoxFunctionSolver` trait for use in tests -#[cfg(test)] -pub(crate) struct MockBackend; - -#[cfg(test)] -impl BlackBoxFunctionSolver for MockBackend { - fn schnorr_verify( + fn ec_add( &self, - _public_key_x: &acvm::FieldElement, - _public_key_y: &acvm::FieldElement, - _signature: &[u8], - _message: &[u8], - ) -> Result { - unimplemented!() - } - - fn pedersen_commitment( - &self, - _inputs: &[acvm::FieldElement], - _domain_separator: u32, + input1_x: &acvm::FieldElement, + input1_y: &acvm::FieldElement, + input2_x: &acvm::FieldElement, + input2_y: &acvm::FieldElement, ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - unimplemented!() + self.0.ec_add(input1_x, input1_y, input2_x, input2_y) } - fn fixed_base_scalar_mul( + fn ec_double( &self, - _low: &acvm::FieldElement, - _high: &acvm::FieldElement, + input_x: &acvm::FieldElement, + input_y: &acvm::FieldElement, ) -> Result<(acvm::FieldElement, acvm::FieldElement), acvm::BlackBoxResolutionError> { - unimplemented!() - } - - fn pedersen_hash( - &self, - _inputs: &[acvm::FieldElement], - _domain_separator: u32, - ) -> Result { - unimplemented!() + self.0.ec_double(input_x, input_y) } } diff --git a/tooling/lsp/src/types.rs b/tooling/lsp/src/types.rs index 48c412eb5ad..e3492f21346 100644 --- a/tooling/lsp/src/types.rs +++ b/tooling/lsp/src/types.rs @@ -1,5 +1,7 @@ use fm::FileId; -use lsp_types::{DefinitionOptions, OneOf}; +use lsp_types::{ + DeclarationCapability, DefinitionOptions, OneOf, TypeDefinitionProviderCapability, +}; use noirc_driver::DebugFile; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; @@ -9,10 +11,10 @@ use std::collections::{BTreeMap, HashMap}; // Re-providing lsp_types 
that we don't need to override pub(crate) use lsp_types::{ - Diagnostic, DiagnosticSeverity, DidChangeConfigurationParams, DidChangeTextDocumentParams, - DidCloseTextDocumentParams, DidOpenTextDocumentParams, DidSaveTextDocumentParams, - InitializeParams, InitializedParams, LogMessageParams, MessageType, Position, - PublishDiagnosticsParams, Range, ServerInfo, TextDocumentSyncCapability, Url, + CodeLens, CodeLensOptions, CodeLensParams, Command, Diagnostic, DiagnosticSeverity, + DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, + DidOpenTextDocumentParams, DidSaveTextDocumentParams, InitializeParams, InitializedParams, + Position, PublishDiagnosticsParams, Range, ServerInfo, TextDocumentSyncCapability, Url, }; pub(crate) mod request { @@ -24,7 +26,10 @@ pub(crate) mod request { }; // Re-providing lsp_types that we don't need to override - pub(crate) use lsp_types::request::{Formatting, GotoDefinition, Shutdown}; + pub(crate) use lsp_types::request::{ + CodeLensRequest as CodeLens, Formatting, GotoDeclaration, GotoDefinition, + GotoTypeDefinition, Shutdown, + }; #[derive(Debug)] pub(crate) struct Initialize; @@ -108,10 +113,22 @@ pub(crate) struct ServerCapabilities { #[serde(skip_serializing_if = "Option::is_none")] pub(crate) text_document_sync: Option, + /// The server provides go to declaration support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) declaration_provider: Option, + /// The server provides goto definition support. #[serde(skip_serializing_if = "Option::is_none")] pub(crate) definition_provider: Option>, + /// The server provides goto type definition support. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) type_definition_provider: Option, + + /// The server provides code lens. + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) code_lens_provider: Option, + /// The server provides document formatting. 
pub(crate) document_formatting_provider: bool, @@ -214,4 +231,6 @@ pub(crate) struct NargoProfileRunResult { pub(crate) opcodes_counts: HashMap, } +pub(crate) type CodeLensResult = Option>; pub(crate) type GotoDefinitionResult = Option; +pub(crate) type GotoDeclarationResult = Option; diff --git a/tooling/nargo/Cargo.toml b/tooling/nargo/Cargo.toml index 48741c367a5..cd97980b9e0 100644 --- a/tooling/nargo/Cargo.toml +++ b/tooling/nargo/Cargo.toml @@ -24,9 +24,16 @@ iter-extended.workspace = true serde.workspace = true thiserror.workspace = true codespan-reporting.workspace = true +tracing.workspace = true rayon = "1.8.0" +jsonrpc.workspace = true [dev-dependencies] # TODO: This dependency is used to generate unit tests for `get_all_paths_in_dir` # TODO: once that method is moved to nargo_cli, we can move this dependency to nargo_cli -tempfile = "3.2.0" \ No newline at end of file +tempfile.workspace = true +jsonrpc-http-server = "18.0" +jsonrpc-core-client = "18.0" +jsonrpc-derive = "18.0" +jsonrpc-core = "18.0" +serial_test = "2.0" diff --git a/tooling/nargo/src/artifacts/contract.rs b/tooling/nargo/src/artifacts/contract.rs index f9e8d45b02e..d928b09fcb9 100644 --- a/tooling/nargo/src/artifacts/contract.rs +++ b/tooling/nargo/src/artifacts/contract.rs @@ -1,25 +1,38 @@ use acvm::acir::circuit::Circuit; use noirc_abi::{Abi, ContractEvent}; -use noirc_driver::ContractFunctionType; +use noirc_driver::{CompiledContract, ContractFunction, ContractFunctionType}; use serde::{Deserialize, Serialize}; -/// `PreprocessedContract` represents a Noir contract which has been preprocessed by a particular backend proving system. -/// -/// This differs from a generic Noir contract artifact in that: -/// - The ACIR bytecode has had an optimization pass applied to tailor it for the backend. -/// - Proving and verification keys have been pregenerated based on this ACIR. 
+use noirc_driver::DebugFile; +use noirc_errors::debug_info::DebugInfo; +use std::collections::BTreeMap; + +use fm::FileId; + #[derive(Serialize, Deserialize)] -pub struct PreprocessedContract { +pub struct ContractArtifact { /// Version of noir used to compile this contract pub noir_version: String, /// The name of the contract. pub name: String, - /// The identifier of the proving backend which this contract has been compiled for. - pub backend: String, /// Each of the contract's functions are compiled into a separate program stored in this `Vec`. - pub functions: Vec, + pub functions: Vec, /// All the events defined inside the contract scope. pub events: Vec, + /// Map of file Id to the source code so locations in debug info can be mapped to source code they point to. + pub file_map: BTreeMap, +} + +impl From for ContractArtifact { + fn from(contract: CompiledContract) -> Self { + ContractArtifact { + noir_version: contract.noir_version, + name: contract.name, + functions: contract.functions.into_iter().map(ContractFunctionArtifact::from).collect(), + events: contract.events, + file_map: contract.file_map, + } + } } /// Each function in the contract will be compiled as a separate noir program. @@ -27,7 +40,7 @@ pub struct PreprocessedContract { /// A contract function unlike a regular Noir program however can have additional properties. /// One of these being a function type. 
#[derive(Debug, Serialize, Deserialize)] -pub struct PreprocessedContractFunction { +pub struct ContractFunctionArtifact { pub name: String, pub function_type: ContractFunctionType, @@ -41,4 +54,23 @@ pub struct PreprocessedContractFunction { deserialize_with = "Circuit::deserialize_circuit_base64" )] pub bytecode: Circuit, + + #[serde( + serialize_with = "DebugInfo::serialize_compressed_base64_json", + deserialize_with = "DebugInfo::deserialize_compressed_base64_json" + )] + pub debug_symbols: DebugInfo, +} + +impl From for ContractFunctionArtifact { + fn from(func: ContractFunction) -> Self { + ContractFunctionArtifact { + name: func.name, + function_type: func.function_type, + is_internal: func.is_internal, + abi: func.abi, + bytecode: func.bytecode, + debug_symbols: func.debug, + } + } } diff --git a/tooling/nargo/src/artifacts/debug.rs b/tooling/nargo/src/artifacts/debug.rs index afa62df51ae..ba93e30ff37 100644 --- a/tooling/nargo/src/artifacts/debug.rs +++ b/tooling/nargo/src/artifacts/debug.rs @@ -28,14 +28,12 @@ impl DebugArtifact { debug_symbols.iter().flat_map(|debug_info| debug_info.get_file_ids()).collect(); for file_id in file_ids.iter() { - let file_source = file_manager.fetch_file(*file_id).source(); + let file_path = file_manager.path(*file_id).expect("file should exist"); + let file_source = file_manager.fetch_file(*file_id).expect("file should exist"); file_map.insert( *file_id, - DebugFile { - source: file_source.to_string(), - path: file_manager.path(*file_id).to_path_buf(), - }, + DebugFile { source: file_source.to_string(), path: file_path.to_path_buf() }, ); } @@ -53,6 +51,12 @@ impl DebugArtifact { self.line_index(location.file, location_start) } + /// Given a location, returns the index of the line it ends at + pub fn location_end_line_index(&self, location: Location) -> Result { + let location_end = location.span.end() as usize; + self.line_index(location.file, location_end) + } + /// Given a location, returns the line number it starts at 
pub fn location_line_number(&self, location: Location) -> Result { let location_start = location.span.start() as usize; @@ -76,12 +80,28 @@ impl DebugArtifact { let line_index = self.line_index(location.file, location_start)?; let line_span = self.line_range(location.file, line_index)?; + let line_length = line_span.end - (line_span.start + 1); let start_in_line = location_start - line_span.start; + + // The location might continue beyond the line, + // so we need a bounds check let end_in_line = location_end - line_span.start; + let end_in_line = std::cmp::min(end_in_line, line_length); Ok(Range { start: start_in_line, end: end_in_line }) } + /// Given a location, returns a Span relative to its last line's + /// position in the file. This is useful when processing a file's + /// contents on a per-line-basis. + pub fn location_in_end_line(&self, location: Location) -> Result, Error> { + let end_line_index = self.location_end_line_index(location)?; + let line_span = self.line_range(location.file, end_line_index)?; + let location_end = location.span.end() as usize; + let end_in_line = location_end - line_span.start; + Ok(Range { start: 0, end: end_in_line }) + } + /// Given a location, returns the last line index /// of its file pub fn last_line_index(&self, location: Location) -> Result { @@ -151,3 +171,70 @@ impl<'a> Files<'a> for DebugArtifact { }) } } + +#[cfg(test)] +mod tests { + use crate::artifacts::debug::DebugArtifact; + use acvm::acir::circuit::OpcodeLocation; + use fm::FileManager; + use noirc_errors::{debug_info::DebugInfo, Location, Span}; + use std::collections::BTreeMap; + use std::ops::Range; + use std::path::Path; + use std::path::PathBuf; + use tempfile::{tempdir, TempDir}; + + // Returns the absolute path to the file + fn create_dummy_file(dir: &TempDir, file_name: &Path) -> PathBuf { + let file_path = dir.path().join(file_name); + let _file = std::fs::File::create(&file_path).unwrap(); + file_path + } + + // Tests that location_in_line correctly 
handles + // locations spanning multiple lines. + // For example, given the snippet: + // ``` + // permute( + // consts::x5_2_config(), + // state); + // ``` + // We want location_in_line to return the range + // containing `permute(` + #[test] + fn location_in_line_stops_at_end_of_line() { + let source_code = r##"pub fn main(mut state: [Field; 2]) -> [Field; 2] { + state = permute( + consts::x5_2_config(), + state); + + state +}"##; + + let dir = tempdir().unwrap(); + let file_name = Path::new("main.nr"); + create_dummy_file(&dir, file_name); + + let mut fm = FileManager::new(dir.path()); + let file_id = fm.add_file_with_source(file_name, source_code.to_string()).unwrap(); + + // Location of + // ``` + // permute( + // consts::x5_2_config(), + // state) + // ``` + let loc = Location::new(Span::inclusive(63, 116), file_id); + + // We don't care about opcodes in this context, + // we just use a dummy to construct debug_symbols + let mut opcode_locations = BTreeMap::>::new(); + opcode_locations.insert(OpcodeLocation::Acir(42), vec![loc]); + + let debug_symbols = vec![DebugInfo::new(opcode_locations, (vec![], vec![]))]; + let debug_artifact = DebugArtifact::new(debug_symbols, &fm); + + let location_in_line = debug_artifact.location_in_line(loc).expect("Expected a range"); + assert_eq!(location_in_line, Range { start: 12, end: 20 }); + } +} diff --git a/tooling/nargo/src/artifacts/program.rs b/tooling/nargo/src/artifacts/program.rs index 890b6c55f7d..d8dc42ec214 100644 --- a/tooling/nargo/src/artifacts/program.rs +++ b/tooling/nargo/src/artifacts/program.rs @@ -1,23 +1,23 @@ +use std::collections::BTreeMap; + use acvm::acir::circuit::Circuit; +use fm::FileId; use noirc_abi::Abi; +use noirc_driver::CompiledProgram; +use noirc_driver::DebugFile; +use noirc_errors::debug_info::DebugInfo; use serde::{Deserialize, Serialize}; -/// `PreprocessedProgram` represents a Noir program which has been preprocessed by a particular backend proving system. 
-/// -/// This differs from a generic Noir program artifact in that: -/// - The ACIR bytecode has had an optimization pass applied to tailor it for the backend. -/// - Proving and verification keys have been pregenerated based on this ACIR. #[derive(Serialize, Deserialize, Debug)] -pub struct PreprocessedProgram { +pub struct ProgramArtifact { pub noir_version: String, - /// Hash of the [`Program`][noirc_frontend::monomorphization::ast::Program] from which this [`PreprocessedProgram`] + /// Hash of the [`Program`][noirc_frontend::monomorphization::ast::Program] from which this [`ProgramArtifact`] /// was compiled. /// /// Used to short-circuit compilation in the case of the source code not changing since the last compilation. pub hash: u64, - pub backend: String, pub abi: Abi, #[serde( @@ -25,4 +25,40 @@ pub struct PreprocessedProgram { deserialize_with = "Circuit::deserialize_circuit_base64" )] pub bytecode: Circuit, + + #[serde( + serialize_with = "DebugInfo::serialize_compressed_base64_json", + deserialize_with = "DebugInfo::deserialize_compressed_base64_json" + )] + pub debug_symbols: DebugInfo, + + /// Map of file Id to the source code so locations in debug info can be mapped to source code they point to. 
+ pub file_map: BTreeMap, +} + +impl From for ProgramArtifact { + fn from(program: CompiledProgram) -> Self { + ProgramArtifact { + hash: program.hash, + abi: program.abi, + noir_version: program.noir_version, + bytecode: program.circuit, + debug_symbols: program.debug, + file_map: program.file_map, + } + } +} + +impl From for CompiledProgram { + fn from(program: ProgramArtifact) -> Self { + CompiledProgram { + hash: program.hash, + abi: program.abi, + noir_version: program.noir_version, + circuit: program.bytecode, + debug: program.debug_symbols, + file_map: program.file_map, + warnings: vec![], + } + } } diff --git a/tooling/nargo/src/constants.rs b/tooling/nargo/src/constants.rs index ff8da403c69..0b50d61fe37 100644 --- a/tooling/nargo/src/constants.rs +++ b/tooling/nargo/src/constants.rs @@ -7,6 +7,8 @@ pub const PROOFS_DIR: &str = "proofs"; pub const SRC_DIR: &str = "src"; /// The directory to store circuits' serialized ACIR representations. pub const TARGET_DIR: &str = "target"; +/// The directory to store serialized ACIR representations of exported library functions. +pub const EXPORT_DIR: &str = "export"; // Files /// The file from which Nargo pulls prover inputs diff --git a/tooling/nargo/src/errors.rs b/tooling/nargo/src/errors.rs index bca8ca24767..c743768bee2 100644 --- a/tooling/nargo/src/errors.rs +++ b/tooling/nargo/src/errors.rs @@ -47,12 +47,6 @@ pub enum NargoError { ForeignCallError(#[from] ForeignCallError), } -impl From for NargoError { - fn from(_: acvm::compiler::CompileError) -> Self { - NargoError::CompilationError - } -} - impl NargoError { /// Extracts the user defined failure message from the ExecutionError /// If one exists. @@ -69,7 +63,6 @@ impl NargoError { ExecutionError::AssertionFailed(message, _) => Some(message), ExecutionError::SolvingError(error) => match error { OpcodeResolutionError::IndexOutOfBounds { .. 
} - | OpcodeResolutionError::UnsupportedBlackBoxFunc(_) | OpcodeResolutionError::OpcodeNotSolvable(_) | OpcodeResolutionError::UnsatisfiedConstrain { .. } => None, OpcodeResolutionError::BrilligFunctionFailed { message, .. } => Some(message), diff --git a/tooling/nargo/src/lib.rs b/tooling/nargo/src/lib.rs index b21fb329939..0fdff8b202f 100644 --- a/tooling/nargo/src/lib.rs +++ b/tooling/nargo/src/lib.rs @@ -19,10 +19,11 @@ use std::collections::BTreeMap; use fm::FileManager; use noirc_driver::{add_dep, prepare_crate, prepare_dependency}; use noirc_frontend::{ - graph::{CrateGraph, CrateId, CrateName}, - hir::Context, + graph::{CrateId, CrateName}, + hir::{def_map::parse_file, Context, ParsedFiles}, }; use package::{Dependency, Package}; +use rayon::prelude::*; pub use self::errors::NargoError; @@ -42,13 +43,21 @@ pub fn prepare_dependencies( } } +pub fn insert_all_files_for_workspace_into_file_manager( + workspace: &workspace::Workspace, + file_manager: &mut FileManager, +) { + for package in workspace.clone().into_iter() { + insert_all_files_for_package_into_file_manager(package, file_manager); + } +} // We will pre-populate the file manager with all the files in the package // This is so that we can avoid having to read from disk when we are compiling // // This does not require parsing because we are interested in the files under the src directory // it may turn out that we do not need to include some Noir files that we add to the file // manager -pub fn insert_all_files_for_package_into_file_manager( +fn insert_all_files_for_package_into_file_manager( package: &Package, file_manager: &mut FileManager, ) { @@ -87,15 +96,29 @@ fn insert_all_files_for_packages_dependencies_into_file_manager( } } -pub fn prepare_package(package: &Package) -> (Context, CrateId) { - let mut fm = FileManager::new(&package.root_dir); - insert_all_files_for_package_into_file_manager(package, &mut fm); +pub fn parse_all(file_manager: &FileManager) -> ParsedFiles { + file_manager + 
.as_file_map() + .all_file_ids() + .par_bridge() + .filter(|&&file_id| { + let file_path = file_manager.path(file_id).expect("expected file to exist"); + let file_extension = + file_path.extension().expect("expected all file paths to have an extension"); + file_extension == "nr" + }) + .map(|&file_id| (file_id, parse_file(file_manager, file_id))) + .collect() +} - let graph = CrateGraph::default(); - let mut context = Context::new(fm, graph); +pub fn prepare_package<'file_manager, 'parsed_files>( + file_manager: &'file_manager FileManager, + parsed_files: &'parsed_files ParsedFiles, + package: &Package, +) -> (Context<'file_manager, 'parsed_files>, CrateId) { + let mut context = Context::from_ref_file_manager(file_manager, parsed_files); let crate_id = prepare_crate(&mut context, &package.entry_path); - context.root_crate_id = crate_id; prepare_dependencies(&mut context, crate_id, &package.dependencies); @@ -140,10 +163,10 @@ mod tests { use tempfile::tempdir; fn create_test_dir_structure(temp_dir: &Path) -> std::io::Result<()> { - fs::create_dir(temp_dir.join("subdir1"))?; - File::create(temp_dir.join("subdir1/file1.txt"))?; - fs::create_dir(temp_dir.join("subdir2"))?; - File::create(temp_dir.join("subdir2/file2.txt"))?; + fs::create_dir(temp_dir.join("sub_dir1"))?; + File::create(temp_dir.join("sub_dir1/file1.txt"))?; + fs::create_dir(temp_dir.join("sub_dir2"))?; + File::create(temp_dir.join("sub_dir2/file2.txt"))?; File::create(temp_dir.join("file3.txt"))?; Ok(()) } @@ -160,8 +183,8 @@ mod tests { // This should be the paths to all of the files in the directory and the subdirectory let expected_paths = vec![ temp_dir.path().join("file3.txt"), - temp_dir.path().join("subdir1/file1.txt"), - temp_dir.path().join("subdir2/file2.txt"), + temp_dir.path().join("sub_dir1/file1.txt"), + temp_dir.path().join("sub_dir2/file2.txt"), ]; assert_eq!(paths.len(), expected_paths.len()); diff --git a/tooling/nargo/src/ops/compile.rs b/tooling/nargo/src/ops/compile.rs index 
59ac5672a11..247e731bb1a 100644 --- a/tooling/nargo/src/ops/compile.rs +++ b/tooling/nargo/src/ops/compile.rs @@ -1,6 +1,8 @@ -use acvm::{acir::circuit::Opcode, Language}; +use acvm::ExpressionWidth; use fm::FileManager; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; +use noirc_frontend::debug::DebugState; +use noirc_frontend::hir::ParsedFiles; use crate::errors::CompileError; use crate::prepare_package; @@ -14,35 +16,46 @@ use rayon::prelude::*; /// /// This function will return an error if there are any compilations errors reported. pub fn compile_workspace( + file_manager: &FileManager, + parsed_files: &ParsedFiles, workspace: &Workspace, - binary_packages: &[Package], - contract_packages: &[Package], - np_language: Language, - is_opcode_supported: impl Fn(&Opcode) -> bool + std::marker::Sync, + expression_width: ExpressionWidth, compile_options: &CompileOptions, ) -> Result<(Vec, Vec), CompileError> { + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace + .into_iter() + .filter(|package| !package.is_library()) + .cloned() + .partition(|package| package.is_binary()); + // Compile all of the packages in parallel. 
- let program_results: Vec<(FileManager, CompilationResult)> = binary_packages + let program_results: Vec> = binary_packages .par_iter() .map(|package| { - compile_program(workspace, package, compile_options, np_language, &is_opcode_supported) + compile_program( + file_manager, + parsed_files, + package, + compile_options, + expression_width, + None, + ) + }) + .collect(); + let contract_results: Vec> = contract_packages + .par_iter() + .map(|package| { + compile_contract(file_manager, parsed_files, package, compile_options, expression_width) }) .collect(); - let contract_results: Vec<(FileManager, CompilationResult)> = - contract_packages - .par_iter() - .map(|package| { - compile_contract(package, compile_options, np_language, &is_opcode_supported) - }) - .collect(); // Report any warnings/errors which were encountered during compilation. let compiled_programs: Vec = program_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -50,10 +63,10 @@ pub fn compile_workspace( .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -64,53 +77,59 @@ pub fn compile_workspace( } pub fn compile_program( - workspace: &Workspace, + file_manager: &FileManager, + parsed_files: &ParsedFiles, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); + expression_width: ExpressionWidth, + cached_program: Option, +) -> CompilationResult { + compile_program_with_debug_state( + file_manager, + parsed_files, + 
package, + compile_options, + expression_width, + cached_program, + DebugState::default(), + ) +} - let program_artifact_path = workspace.package_build_path(package); - let mut debug_artifact_path = program_artifact_path.clone(); - debug_artifact_path.set_file_name(format!("debug_{}.json", package.name)); +pub fn compile_program_with_debug_state( + file_manager: &FileManager, + parsed_files: &ParsedFiles, + package: &Package, + compile_options: &CompileOptions, + expression_width: ExpressionWidth, + cached_program: Option, + debug_state: DebugState, +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); + context.debug_state = debug_state; let (program, warnings) = - match noirc_driver::compile_main(&mut context, crate_id, compile_options, None, true) { - Ok(program_and_warnings) => program_and_warnings, - Err(errors) => { - return (context.file_manager, Err(errors)); - } - }; + noirc_driver::compile_main(&mut context, crate_id, compile_options, cached_program)?; // Apply backend specific optimizations. 
- let optimized_program = crate::ops::optimize_program(program, np_language, is_opcode_supported) - .expect("Backend does not support an opcode that is in the IR"); + let optimized_program = crate::ops::optimize_program(program, expression_width); - (context.file_manager, Ok((optimized_program, warnings))) + Ok((optimized_program, warnings)) } -fn compile_contract( +pub fn compile_contract( + file_manager: &FileManager, + parsed_files: &ParsedFiles, package: &Package, compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); + expression_width: ExpressionWidth, +) -> CompilationResult { + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); let (contract, warnings) = - match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { - Ok(contracts_and_warnings) => contracts_and_warnings, - Err(errors) => { - return (context.file_manager, Err(errors)); - } - }; - - let optimized_contract = - crate::ops::optimize_contract(contract, np_language, &is_opcode_supported) - .expect("Backend does not support an opcode that is in the IR"); - - (context.file_manager, Ok((optimized_contract, warnings))) + noirc_driver::compile_contract(&mut context, crate_id, compile_options)?; + + let optimized_contract = crate::ops::optimize_contract(contract, expression_width); + + Ok((optimized_contract, warnings)) } pub(crate) fn report_errors( diff --git a/tooling/nargo/src/ops/execute.rs b/tooling/nargo/src/ops/execute.rs index d7cb44188c4..4fc7f7b599f 100644 --- a/tooling/nargo/src/ops/execute.rs +++ b/tooling/nargo/src/ops/execute.rs @@ -7,6 +7,7 @@ use crate::NargoError; use super::foreign_calls::ForeignCallExecutor; +#[tracing::instrument(level = "trace", skip_all)] pub fn execute_circuit( circuit: &Circuit, initial_witness: WitnessMap, @@ -54,6 +55,5 @@ pub fn execute_circuit( } } - let 
solved_witness = acvm.finalize(); - Ok(solved_witness) + Ok(acvm.finalize()) } diff --git a/tooling/nargo/src/ops/foreign_calls.rs b/tooling/nargo/src/ops/foreign_calls.rs index d2f035957e8..056df976264 100644 --- a/tooling/nargo/src/ops/foreign_calls.rs +++ b/tooling/nargo/src/ops/foreign_calls.rs @@ -1,8 +1,8 @@ -use crate::artifacts::debug::DebugVars; use acvm::{ acir::brillig::{ForeignCallParam, ForeignCallResult, Value}, pwg::ForeignCallWaitInfo, }; +use jsonrpc::{arg as build_json_rpc_arg, minreq_http::Builder, Client}; use noirc_printable_type::{decode_string_value, ForeignCallError, PrintableValueDisplay}; pub trait ForeignCallExecutor { @@ -10,21 +10,11 @@ pub trait ForeignCallExecutor { &mut self, foreign_call: &ForeignCallWaitInfo, ) -> Result; - - fn execute_with_debug_vars( - &mut self, - foreign_call: &ForeignCallWaitInfo, - debug_vars: &mut DebugVars, - ) -> Result; } /// This enumeration represents the Brillig foreign calls that are natively supported by nargo. /// After resolution of a foreign call, nargo will restart execution of the ACVM -pub(crate) enum ForeignCall { - DebugVarAssign, - DebugVarDrop, - DebugMemberAssign(u32), - DebugDerefAssign, +pub enum ForeignCall { Print, CreateMock, SetMockParams, @@ -42,18 +32,6 @@ impl std::fmt::Display for ForeignCall { impl ForeignCall { pub(crate) fn name(&self) -> &'static str { match self { - ForeignCall::DebugVarAssign => "__debug_var_assign", - ForeignCall::DebugVarDrop => "__debug_var_drop", - ForeignCall::DebugMemberAssign(1) => "__debug_member_assign_1", - ForeignCall::DebugMemberAssign(2) => "__debug_member_assign_2", - ForeignCall::DebugMemberAssign(3) => "__debug_member_assign_3", - ForeignCall::DebugMemberAssign(4) => "__debug_member_assign_4", - ForeignCall::DebugMemberAssign(5) => "__debug_member_assign_5", - ForeignCall::DebugMemberAssign(6) => "__debug_member_assign_6", - ForeignCall::DebugMemberAssign(7) => "__debug_member_assign_7", - ForeignCall::DebugMemberAssign(8) => 
"__debug_member_assign_8", - ForeignCall::DebugMemberAssign(_) => panic!("unsupported member assignment arity"), - ForeignCall::DebugDerefAssign => "__debug_deref_assign", ForeignCall::Print => "print", ForeignCall::CreateMock => "create_mock", ForeignCall::SetMockParams => "set_mock_params", @@ -64,16 +42,7 @@ impl ForeignCall { } pub(crate) fn lookup(op_name: &str) -> Option { - let member_pre = "__debug_member_assign_"; - if let Some(op_suffix) = op_name.strip_prefix(member_pre) { - let arity = - op_suffix.parse::().expect("failed to parse debug_member_assign arity"); - return Some(ForeignCall::DebugMemberAssign(arity)); - } match op_name { - "__debug_var_assign" => Some(ForeignCall::DebugVarAssign), - "__debug_var_drop" => Some(ForeignCall::DebugVarDrop), - "__debug_deref_assign" => Some(ForeignCall::DebugDerefAssign), "print" => Some(ForeignCall::Print), "create_mock" => Some(ForeignCall::CreateMock), "set_mock_params" => Some(ForeignCall::SetMockParams), @@ -126,17 +95,59 @@ pub struct DefaultForeignCallExecutor { mocked_responses: Vec, /// Whether to print [`ForeignCall::Print`] output. 
show_output: bool, + /// JSON RPC client to resolve foreign calls + external_resolver: Option, +} + +impl DefaultForeignCallExecutor { + pub fn new(show_output: bool, resolver_url: Option<&str>) -> Self { + let oracle_resolver = resolver_url.map(|resolver_url| { + let transport_builder = + Builder::new().url(resolver_url).expect("Invalid oracle resolver URL"); + Client::with_transport(transport_builder.build()) + }); + DefaultForeignCallExecutor { + show_output, + external_resolver: oracle_resolver, + ..DefaultForeignCallExecutor::default() + } + } } impl DefaultForeignCallExecutor { - pub fn new(show_output: bool) -> Self { - DefaultForeignCallExecutor { show_output, ..DefaultForeignCallExecutor::default() } + fn extract_mock_id( + foreign_call_inputs: &[ForeignCallParam], + ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { + let (id, params) = + foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; + Ok((id.unwrap_value().to_usize(), params)) + } + + fn find_mock_by_id(&mut self, id: usize) -> Option<&mut MockedCall> { + self.mocked_responses.iter_mut().find(|response| response.id == id) + } + + fn parse_string(param: &ForeignCallParam) -> String { + let fields: Vec<_> = param.values().into_iter().map(|value| value.to_field()).collect(); + decode_string_value(&fields) + } + + fn execute_print(foreign_call_inputs: &[ForeignCallParam]) -> Result<(), ForeignCallError> { + let skip_newline = foreign_call_inputs[0].unwrap_value().is_zero(); + let display_values: PrintableValueDisplay = foreign_call_inputs + .split_first() + .ok_or(ForeignCallError::MissingForeignCallInputs)? 
+ .1 + .try_into()?; + print!("{display_values}{}", if skip_newline { "" } else { "\n" }); + Ok(()) } +} - fn execute_optional_debug_vars( +impl ForeignCallExecutor for DefaultForeignCallExecutor { + fn execute( &mut self, foreign_call: &ForeignCallWaitInfo, - debug_vars: Option<&mut DebugVars>, ) -> Result { let foreign_call_name = foreign_call.function.as_str(); match ForeignCall::lookup(foreign_call_name) { @@ -146,62 +157,6 @@ impl DefaultForeignCallExecutor { } Ok(ForeignCallResult { values: vec![] }) } - Some(ForeignCall::DebugVarAssign) => { - let fcp_var_id = &foreign_call.inputs[0]; - if let (Some(ds), ForeignCallParam::Single(var_id_value)) = (debug_vars, fcp_var_id) - { - let var_id = var_id_value.to_u128() as u32; - let values: Vec = - foreign_call.inputs[1..].iter().flat_map(|x| x.values()).collect(); - ds.assign(var_id, &values); - } - Ok(ForeignCallResult { values: vec![] }) - } - Some(ForeignCall::DebugVarDrop) => { - let fcp_var_id = &foreign_call.inputs[0]; - if let (Some(ds), ForeignCallParam::Single(var_id_value)) = (debug_vars, fcp_var_id) - { - let var_id = var_id_value.to_u128() as u32; - ds.drop(var_id); - } - Ok(ForeignCallResult { values: vec![] }) - } - Some(ForeignCall::DebugMemberAssign(arity)) => { - if let (Some(ds), Some(ForeignCallParam::Single(var_id_value))) = - (debug_vars, foreign_call.inputs.get(0)) - { - let arity = arity as usize; - let var_id = var_id_value.to_u128() as u32; - let n = foreign_call.inputs.len(); - let indexes: Vec = foreign_call.inputs[(n - arity)..n] - .iter() - .map(|fcp_v| { - if let ForeignCallParam::Single(v) = fcp_v { - v.to_u128() as u32 - } else { - panic!("expected ForeignCallParam::Single(v)"); - } - }) - .collect(); - let values: Vec = (0..n - 1 - arity) - .flat_map(|i| { - foreign_call.inputs.get(1 + i).map(|fci| fci.values()).unwrap_or(vec![]) - }) - .collect(); - ds.assign_field(var_id, indexes, &values); - } - Ok(ForeignCallResult { values: vec![] }) - } - Some(ForeignCall::DebugDerefAssign) => 
{ - let fcp_var_id = &foreign_call.inputs[0]; - let fcp_value = &foreign_call.inputs[1]; - if let (Some(ds), ForeignCallParam::Single(var_id_value)) = (debug_vars, fcp_var_id) - { - let var_id = var_id_value.to_u128() as u32; - ds.assign_deref(var_id, &fcp_value.values()); - } - Ok(ForeignCallResult { values: vec![] }) - } Some(ForeignCall::CreateMock) => { let mock_oracle_name = Self::parse_string(&foreign_call.inputs[0]); assert!(ForeignCall::lookup(&mock_oracle_name).is_none()); @@ -247,74 +202,136 @@ impl DefaultForeignCallExecutor { Ok(ForeignCallResult { values: vec![] }) } None => { - let response_position = self + let mock_response_position = self .mocked_responses .iter() - .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)) - .unwrap_or_else(|| panic!("Unknown foreign call {}", foreign_call_name)); + .position(|response| response.matches(foreign_call_name, &foreign_call.inputs)); - let mock = self - .mocked_responses - .get_mut(response_position) - .expect("Invalid position of mocked response"); - let result = mock.result.values.clone(); - - if let Some(times_left) = &mut mock.times_left { - *times_left -= 1; - if *times_left == 0 { - self.mocked_responses.remove(response_position); + match (mock_response_position, &self.external_resolver) { + (Some(response_position), _) => { + let mock = self + .mocked_responses + .get_mut(response_position) + .expect("Invalid position of mocked response"); + let result = mock.result.values.clone(); + + if let Some(times_left) = &mut mock.times_left { + *times_left -= 1; + if *times_left == 0 { + self.mocked_responses.remove(response_position); + } + } + + Ok(ForeignCallResult { values: result }) } - } + (None, Some(external_resolver)) => { + let encoded_params: Vec<_> = + foreign_call.inputs.iter().map(build_json_rpc_arg).collect(); + + let req = + external_resolver.build_request(foreign_call_name, &encoded_params); + + let response = external_resolver.send_request(req)?; + + let 
parsed_response: ForeignCallResult = response.result()?; - Ok(ForeignCallResult { values: result }) + Ok(parsed_response) + } + (None, None) => panic!("Unknown foreign call {}", foreign_call_name), + } } } } } -impl DefaultForeignCallExecutor { - fn extract_mock_id( - foreign_call_inputs: &[ForeignCallParam], - ) -> Result<(usize, &[ForeignCallParam]), ForeignCallError> { - let (id, params) = - foreign_call_inputs.split_first().ok_or(ForeignCallError::MissingForeignCallInputs)?; - Ok((id.unwrap_value().to_usize(), params)) - } +#[cfg(test)] +mod tests { + use acvm::{ + acir::brillig::ForeignCallParam, + brillig_vm::brillig::{ForeignCallResult, Value}, + pwg::ForeignCallWaitInfo, + FieldElement, + }; + use jsonrpc_core::Result as RpcResult; + use jsonrpc_derive::rpc; + use jsonrpc_http_server::{Server, ServerBuilder}; + use serial_test::serial; - fn find_mock_by_id(&mut self, id: usize) -> Option<&mut MockedCall> { - self.mocked_responses.iter_mut().find(|response| response.id == id) + use crate::ops::{DefaultForeignCallExecutor, ForeignCallExecutor}; + + #[allow(unreachable_pub)] + #[rpc] + pub trait OracleResolver { + #[rpc(name = "echo")] + fn echo(&self, param: ForeignCallParam) -> RpcResult; + + #[rpc(name = "sum")] + fn sum(&self, array: ForeignCallParam) -> RpcResult; } - fn parse_string(param: &ForeignCallParam) -> String { - let fields: Vec<_> = param.values().into_iter().map(|value| value.to_field()).collect(); - decode_string_value(&fields) + struct OracleResolverImpl; + + impl OracleResolver for OracleResolverImpl { + fn echo(&self, param: ForeignCallParam) -> RpcResult { + Ok(vec![param].into()) + } + + fn sum(&self, array: ForeignCallParam) -> RpcResult { + let mut res: FieldElement = 0_usize.into(); + + for value in array.values() { + res += value.to_field(); + } + + Ok(Value::from(res).into()) + } } - fn execute_print(foreign_call_inputs: &[ForeignCallParam]) -> Result<(), ForeignCallError> { - let skip_newline = 
foreign_call_inputs[0].unwrap_value().is_zero(); - let display_values: PrintableValueDisplay = foreign_call_inputs - .split_first() - .ok_or(ForeignCallError::MissingForeignCallInputs)? - .1 - .try_into()?; - print!("{display_values}{}", if skip_newline { "" } else { "\n" }); - Ok(()) + fn build_oracle_server() -> (Server, String) { + let mut io = jsonrpc_core::IoHandler::new(); + io.extend_with(OracleResolverImpl.to_delegate()); + + let server = ServerBuilder::new(io) + .start_http(&"127.0.0.1:5555".parse().expect("Invalid address")) + .expect("Could not start server"); + + let url = format!("http://{}", server.address()); + (server, url) } -} -impl ForeignCallExecutor for DefaultForeignCallExecutor { - fn execute( - &mut self, - foreign_call: &ForeignCallWaitInfo, - ) -> Result { - self.execute_optional_debug_vars(foreign_call, None) + #[serial] + #[test] + fn test_oracle_resolver_echo() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "echo".to_string(), + inputs: vec![ForeignCallParam::Single(1_u128.into())], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), ForeignCallResult { values: foreign_call.inputs }); + + server.close(); } - fn execute_with_debug_vars( - &mut self, - foreign_call: &ForeignCallWaitInfo, - debug_vars: &mut DebugVars, - ) -> Result { - self.execute_optional_debug_vars(foreign_call, Some(debug_vars)) + #[serial] + #[test] + fn test_oracle_resolver_sum() { + let (server, url) = build_oracle_server(); + + let mut executor = DefaultForeignCallExecutor::new(false, Some(&url)); + + let foreign_call = ForeignCallWaitInfo { + function: "sum".to_string(), + inputs: vec![ForeignCallParam::Array(vec![1_usize.into(), 2_usize.into()])], + }; + + let result = executor.execute(&foreign_call); + assert_eq!(result.unwrap(), Value::from(3_usize).into()); + + server.close(); } } diff --git 
a/tooling/nargo/src/ops/mod.rs b/tooling/nargo/src/ops/mod.rs index 34487ed9770..d960a7a9f45 100644 --- a/tooling/nargo/src/ops/mod.rs +++ b/tooling/nargo/src/ops/mod.rs @@ -1,6 +1,8 @@ -pub use self::compile::{compile_program, compile_workspace}; +pub use self::compile::{ + compile_contract, compile_program, compile_program_with_debug_state, compile_workspace, +}; pub use self::execute::execute_circuit; -pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCallExecutor}; +pub use self::foreign_calls::{DefaultForeignCallExecutor, ForeignCall, ForeignCallExecutor}; pub use self::optimize::{optimize_contract, optimize_program}; pub use self::test::{run_test, TestStatus}; diff --git a/tooling/nargo/src/ops/optimize.rs b/tooling/nargo/src/ops/optimize.rs index 54e2432aa40..d3a36dd65ac 100644 --- a/tooling/nargo/src/ops/optimize.rs +++ b/tooling/nargo/src/ops/optimize.rs @@ -1,34 +1,30 @@ -use acvm::{acir::circuit::Opcode, Language}; -use iter_extended::try_vecmap; +use acvm::ExpressionWidth; +use iter_extended::vecmap; use noirc_driver::{CompiledContract, CompiledProgram}; -use crate::NargoError; - pub fn optimize_program( mut program: CompiledProgram, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result { + expression_width: ExpressionWidth, +) -> CompiledProgram { let (optimized_circuit, location_map) = - acvm::compiler::compile(program.circuit, np_language, is_opcode_supported)?; + acvm::compiler::compile(program.circuit, expression_width); program.circuit = optimized_circuit; program.debug.update_acir(location_map); - Ok(program) + program } pub fn optimize_contract( contract: CompiledContract, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> Result { - let functions = try_vecmap(contract.functions, |mut func| { + expression_width: ExpressionWidth, +) -> CompiledContract { + let functions = vecmap(contract.functions, |mut func| { let (optimized_bytecode, location_map) = - 
acvm::compiler::compile(func.bytecode, np_language, is_opcode_supported)?; + acvm::compiler::compile(func.bytecode, expression_width); func.bytecode = optimized_bytecode; func.debug.update_acir(location_map); - Ok::<_, NargoError>(func) - })?; + func + }); - Ok(CompiledContract { functions, ..contract }) + CompiledContract { functions, ..contract } } diff --git a/tooling/nargo/src/ops/test.rs b/tooling/nargo/src/ops/test.rs index a0ca24e04ef..0929739a6ab 100644 --- a/tooling/nargo/src/ops/test.rs +++ b/tooling/nargo/src/ops/test.rs @@ -19,6 +19,7 @@ pub fn run_test( context: &mut Context, test_function: TestFunction, show_output: bool, + foreign_call_resolver_url: Option<&str>, config: &CompileOptions, ) -> TestStatus { let program = compile_no_check(context, config, test_function.get_id(), None, false); @@ -30,7 +31,7 @@ pub fn run_test( &program.circuit, WitnessMap::new(), blackbox_solver, - &mut DefaultForeignCallExecutor::new(show_output), + &mut DefaultForeignCallExecutor::new(show_output, foreign_call_resolver_url), ); test_status_program_compile_pass(test_function, program.debug, circuit_execution) } diff --git a/tooling/nargo/src/workspace.rs b/tooling/nargo/src/workspace.rs index 65f9ab7e0d9..5696a758531 100644 --- a/tooling/nargo/src/workspace.rs +++ b/tooling/nargo/src/workspace.rs @@ -10,7 +10,7 @@ use std::{ }; use crate::{ - constants::{CONTRACT_DIR, PROOFS_DIR, TARGET_DIR}, + constants::{CONTRACT_DIR, EXPORT_DIR, PROOFS_DIR, TARGET_DIR}, package::Package, }; @@ -40,6 +40,10 @@ impl Workspace { pub fn target_directory_path(&self) -> PathBuf { self.root_dir.join(TARGET_DIR) } + + pub fn export_directory_path(&self) -> PathBuf { + self.root_dir.join(EXPORT_DIR) + } } pub enum IntoIter<'a, T> { diff --git a/tooling/nargo_cli/Cargo.toml b/tooling/nargo_cli/Cargo.toml index 5ec0148d81a..6e022f090f0 100644 --- a/tooling/nargo_cli/Cargo.toml +++ b/tooling/nargo_cli/Cargo.toml @@ -32,7 +32,7 @@ noirc_frontend.workspace = true noirc_abi.workspace = true 
noirc_errors.workspace = true acvm.workspace = true -barretenberg_blackbox_solver.workspace = true +bn254_blackbox_solver.workspace = true toml.workspace = true serde.workspace = true serde_json.workspace = true @@ -46,24 +46,21 @@ hex.workspace = true similar-asserts.workspace = true termcolor = "1.1.2" color-eyre = "0.6.2" -env_logger = "0.9.0" tokio = { version = "1.0", features = ["io-std"] } dap.workspace = true # Backends backend-interface = { path = "../backend_interface" } -bb_abstraction_leaks.workspace = true # Logs -tracing.workspace = true -tracing-subscriber = "0.3.18" +tracing-subscriber.workspace = true tracing-appender = "0.2.3" [target.'cfg(not(unix))'.dependencies] tokio-util = { version = "0.7.8", features = ["compat"] } [dev-dependencies] -tempfile = "3.6.0" +tempfile.workspace = true dirs.workspace = true assert_cmd = "2.0.8" assert_fs = "1.0.10" @@ -77,7 +74,7 @@ pprof = { version = "0.12", features = [ "criterion", ] } iai = "0.1.1" -test-binary = "3.0.1" +test-binary = "3.0.2" [[bench]] name = "criterion" diff --git a/tooling/nargo_cli/build.rs b/tooling/nargo_cli/build.rs index 6f6d65ee89c..57aa487f66a 100644 --- a/tooling/nargo_cli/build.rs +++ b/tooling/nargo_cli/build.rs @@ -14,9 +14,6 @@ fn check_rustc_version() { const GIT_COMMIT: &&str = &"GIT_COMMIT"; fn main() { - // Rebuild if the tests have changed - println!("cargo:rerun-if-changed=tests"); - check_rustc_version(); // Only use build_data if the environment variable isn't set @@ -39,6 +36,10 @@ fn main() { }; let test_dir = root_dir.join("test_programs"); + // Rebuild if the tests have changed + println!("cargo:rerun-if-changed=tests"); + println!("cargo:rerun-if-changed={}", test_dir.as_os_str().to_str().unwrap()); + generate_execution_success_tests(&mut test_file, &test_dir); generate_noir_test_success_tests(&mut test_file, &test_dir); generate_noir_test_failure_tests(&mut test_file, &test_dir); @@ -74,7 +75,7 @@ fn execution_success_{test_name}() {{ let mut cmd = 
Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute"); + cmd.arg("execute").arg("--force"); cmd.assert().success(); }} @@ -193,11 +194,12 @@ fn compile_success_empty_{test_name}() {{ cmd.arg("--program-dir").arg(test_program_dir); cmd.arg("info"); cmd.arg("--json"); + cmd.arg("--force"); let output = cmd.output().expect("Failed to execute command"); if !output.status.success() {{ - panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap()); + panic!("`nargo info` failed with: {{}}", String::from_utf8(output.stderr).unwrap_or_default()); }} // `compile_success_empty` tests should be able to compile down to an empty circuit. @@ -205,7 +207,7 @@ fn compile_success_empty_{test_name}() {{ panic!("JSON was not well-formatted {{:?}}",output.stdout) }}); let num_opcodes = &json["programs"][0]["acir_opcodes"]; - assert_eq!(num_opcodes.as_u64().unwrap(), 0); + assert_eq!(num_opcodes.as_u64().expect("number of opcodes should fit in a u64"), 0); }} "#, test_dir = test_dir.display(), @@ -241,7 +243,7 @@ fn compile_success_contract_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("compile"); + cmd.arg("compile").arg("--force"); cmd.assert().success(); }} @@ -279,7 +281,7 @@ fn compile_failure_{test_name}() {{ let mut cmd = Command::cargo_bin("nargo").unwrap(); cmd.env("NARGO_BACKEND_PATH", path_to_mock_backend()); cmd.arg("--program-dir").arg(test_program_dir); - cmd.arg("execute"); + cmd.arg("execute").arg("--force"); cmd.assert().failure().stderr(predicate::str::contains("The application panicked (crashed).").not()); }} diff --git a/tooling/nargo_cli/src/backends.rs b/tooling/nargo_cli/src/backends.rs index 8b1da2cd118..2b3e9d8861f 100644 --- a/tooling/nargo_cli/src/backends.rs +++ 
b/tooling/nargo_cli/src/backends.rs @@ -7,7 +7,7 @@ fn active_backend_file_path() -> PathBuf { backends_directory().join(".selected_backend") } -pub(crate) const ACVM_BACKEND_BARRETENBERG: &str = "acvm-backend-barretenberg"; +pub(crate) use backend_interface::ACVM_BACKEND_BARRETENBERG; pub(crate) fn clear_active_backend() { let active_backend_file = active_backend_file_path(); diff --git a/tooling/nargo_cli/src/cli/check_cmd.rs b/tooling/nargo_cli/src/cli/check_cmd.rs index 0ea8186a237..a8b9dbdeeb2 100644 --- a/tooling/nargo_cli/src/cli/check_cmd.rs +++ b/tooling/nargo_cli/src/cli/check_cmd.rs @@ -2,16 +2,21 @@ use crate::backends::Backend; use crate::errors::CliError; use clap::Args; +use fm::FileManager; use iter_extended::btree_map; -use nargo::{errors::CompileError, package::Package, prepare_package}; +use nargo::{ + errors::CompileError, insert_all_files_for_workspace_into_file_manager, package::Package, + parse_all, prepare_package, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::{AbiParameter, AbiType, MAIN_RETURN_NAME}; use noirc_driver::{ - check_crate, compute_function_abi, CompileOptions, NOIR_ARTIFACT_VERSION_STRING, + check_crate, compute_function_abi, file_manager_with_stdlib, CompileOptions, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_frontend::{ graph::{CrateId, CrateName}, - hir::Context, + hir::{Context, ParsedFiles}, }; use super::fs::write_to_file; @@ -47,15 +52,24 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + for package in &workspace { - check_package(package, &args.compile_options)?; + check_package(&workspace_file_manager, &parsed_files, package, &args.compile_options)?; println!("[{}] Constraint system successfully built!", 
package.name); } Ok(()) } -fn check_package(package: &Package, compile_options: &CompileOptions) -> Result<(), CompileError> { - let (mut context, crate_id) = prepare_package(package); +fn check_package( + file_manager: &FileManager, + parsed_files: &ParsedFiles, + package: &Package, + compile_options: &CompileOptions, +) -> Result<(), CompileError> { + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, crate_id, diff --git a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs index 02c83adb59a..e7ab86f343a 100644 --- a/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs +++ b/tooling/nargo_cli/src/cli/codegen_verifier_cmd.rs @@ -1,19 +1,14 @@ +use super::fs::{create_named_dir, write_to_file}; use super::NargoConfig; -use super::{ - compile_cmd::compile_bin_package, - fs::{create_named_dir, write_to_file}, -}; use crate::backends::Backend; +use crate::cli::compile_cmd::report_errors; use crate::errors::CliError; -use acvm::Language; -use backend_interface::BackendOpcodeSupport; -use bb_abstraction_leaks::ACVM_BACKEND_BARRETENBERG; use clap::Args; -use nargo::package::Package; -use nargo::workspace::Workspace; +use nargo::ops::compile_program; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; use noirc_frontend::graph::CrateName; /// Generates a Solidity verifier smart contract for the program @@ -46,17 +41,31 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (np_language, opcode_support) = backend.get_backend_info()?; - for package in &workspace { - let smart_contract_string = smart_contract_for_package( - &workspace, - backend, + let mut 
workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + + let expression_width = backend.get_backend_info()?; + let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); + for package in binary_packages { + let compilation_result = compile_program( + &workspace_file_manager, + &parsed_files, package, &args.compile_options, - np_language, - &opcode_support, + expression_width, + None, + ); + + let program = report_errors( + compilation_result, + &workspace_file_manager, + args.compile_options.deny_warnings, + args.compile_options.silence_warnings, )?; + let smart_contract_string = backend.eth_contract(&program.circuit)?; + let contract_dir = workspace.contracts_directory_path(package); create_named_dir(&contract_dir, "contract"); let contract_path = contract_dir.join("plonk_vk").with_extension("sol"); @@ -67,24 +76,3 @@ pub(crate) fn run( Ok(()) } - -fn smart_contract_for_package( - workspace: &Workspace, - backend: &Backend, - package: &Package, - compile_options: &CompileOptions, - np_language: Language, - opcode_support: &BackendOpcodeSupport, -) -> Result { - let program = - compile_bin_package(workspace, package, compile_options, np_language, opcode_support)?; - - let mut smart_contract_string = backend.eth_contract(&program.circuit)?; - - if backend.name() == ACVM_BACKEND_BARRETENBERG { - smart_contract_string = - bb_abstraction_leaks::complete_barretenberg_verifier_contract(smart_contract_string); - } - - Ok(smart_contract_string) -} diff --git a/tooling/nargo_cli/src/cli/compile_cmd.rs b/tooling/nargo_cli/src/cli/compile_cmd.rs index 26121650089..29e6012996a 100644 --- a/tooling/nargo_cli/src/cli/compile_cmd.rs +++ b/tooling/nargo_cli/src/cli/compile_cmd.rs @@ -1,39 +1,32 @@ use std::path::Path; -use acvm::acir::circuit::Opcode; -use acvm::Language; -use 
backend_interface::BackendOpcodeSupport; +use acvm::ExpressionWidth; + use fm::FileManager; -use iter_extended::vecmap; -use nargo::artifacts::contract::PreprocessedContract; -use nargo::artifacts::contract::PreprocessedContractFunction; -use nargo::artifacts::debug::DebugArtifact; -use nargo::artifacts::program::PreprocessedProgram; +use nargo::artifacts::program::ProgramArtifact; use nargo::errors::CompileError; +use nargo::ops::{compile_contract, compile_program}; use nargo::package::Package; -use nargo::prepare_package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::file_manager_with_stdlib; use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; use noirc_driver::{CompilationResult, CompileOptions, CompiledContract, CompiledProgram}; + use noirc_frontend::graph::CrateName; use clap::Args; +use noirc_frontend::hir::ParsedFiles; use crate::backends::Backend; use crate::errors::CliError; use super::fs::program::only_acir; -use super::fs::program::{ - read_debug_artifact_from_file, read_program_from_file, save_contract_to_file, - save_debug_artifact_to_file, save_program_to_file, -}; +use super::fs::program::{read_program_from_file, save_contract_to_file, save_program_to_file}; use super::NargoConfig; use rayon::prelude::*; -// TODO(#1388): pull this from backend. 
-const BACKEND_IDENTIFIER: &str = "acvm-backend-barretenberg"; - /// Compile the program and its secret execution trace into ACIR format #[derive(Debug, Clone, Args)] pub(crate) struct CompileCommand { @@ -66,23 +59,30 @@ pub(crate) fn run( )?; let circuit_dir = workspace.target_directory_path(); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + + let expression_width = backend.get_backend_info_or_default(); + let (compiled_program, compiled_contracts) = compile_workspace( + &workspace_file_manager, + &parsed_files, + &workspace, + expression_width, + &args.compile_options, + )?; + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace .into_iter() .filter(|package| !package.is_library()) .cloned() .partition(|package| package.is_binary()); - let (np_language, opcode_support) = backend.get_backend_info_or_default(); - let (_, compiled_contracts) = compile_workspace( - &workspace, - &binary_packages, - &contract_packages, - np_language, - &opcode_support, - &args.compile_options, - )?; - // Save build artifacts to disk. 
+ let only_acir = args.compile_options.only_acir; + for (package, program) in binary_packages.into_iter().zip(compiled_program) { + save_program(program.clone(), &package, &workspace.target_directory_path(), only_acir); + } for (package, contract) in contract_packages.into_iter().zip(compiled_contracts) { save_contract(contract, &package, &circuit_dir); } @@ -91,37 +91,53 @@ pub(crate) fn run( } pub(super) fn compile_workspace( + file_manager: &FileManager, + parsed_files: &ParsedFiles, workspace: &Workspace, - binary_packages: &[Package], - contract_packages: &[Package], - np_language: Language, - opcode_support: &BackendOpcodeSupport, + expression_width: ExpressionWidth, compile_options: &CompileOptions, ) -> Result<(Vec, Vec), CliError> { + let (binary_packages, contract_packages): (Vec<_>, Vec<_>) = workspace + .into_iter() + .filter(|package| !package.is_library()) + .cloned() + .partition(|package| package.is_binary()); + // Compile all of the packages in parallel. - let program_results: Vec<(FileManager, CompilationResult)> = binary_packages + let program_results: Vec> = binary_packages .par_iter() .map(|package| { - let is_opcode_supported = |opcode: &_| opcode_support.is_opcode_supported(opcode); - compile_program(workspace, package, compile_options, np_language, &is_opcode_supported) + let program_artifact_path = workspace.package_build_path(package); + let cached_program: Option = + read_program_from_file(program_artifact_path) + .ok() + .filter(|p| p.noir_version == NOIR_ARTIFACT_VERSION_STRING) + .map(|p| p.into()); + + compile_program( + file_manager, + parsed_files, + package, + compile_options, + expression_width, + cached_program, + ) + }) + .collect(); + let contract_results: Vec> = contract_packages + .par_iter() + .map(|package| { + compile_contract(file_manager, parsed_files, package, compile_options, expression_width) }) .collect(); - let contract_results: Vec<(FileManager, CompilationResult)> = - contract_packages - .par_iter() - 
.map(|package| { - let is_opcode_supported = |opcode: &_| opcode_support.is_opcode_supported(opcode); - compile_contract(package, compile_options, np_language, &is_opcode_supported) - }) - .collect(); // Report any warnings/errors which were encountered during compilation. let compiled_programs: Vec = program_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -129,10 +145,10 @@ pub(super) fn compile_workspace( .collect::>()?; let compiled_contracts: Vec = contract_results .into_iter() - .map(|(file_manager, compilation_result)| { + .map(|compilation_result| { report_errors( compilation_result, - &file_manager, + file_manager, compile_options.deny_warnings, compile_options.silence_warnings, ) @@ -142,174 +158,25 @@ pub(super) fn compile_workspace( Ok((compiled_programs, compiled_contracts)) } -pub(crate) fn compile_bin_package( - workspace: &Workspace, - package: &Package, - compile_options: &CompileOptions, - np_language: Language, - opcode_support: &BackendOpcodeSupport, -) -> Result { - if package.is_library() { - return Err(CompileError::LibraryCrate(package.name.clone()).into()); - } - - let (file_manager, compilation_result) = - compile_program(workspace, package, compile_options, np_language, &|opcode| { - opcode_support.is_opcode_supported(opcode) - }); - - let program = report_errors( - compilation_result, - &file_manager, - compile_options.deny_warnings, - compile_options.silence_warnings, - )?; - - Ok(program) -} - -fn compile_program( - workspace: &Workspace, - package: &Package, - compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); - if compile_options.instrument_debug { - context.instrument_debug = true; - } - - let 
program_artifact_path = workspace.package_build_path(package); - let mut debug_artifact_path = program_artifact_path.clone(); - debug_artifact_path.set_file_name(format!("debug_{}.json", package.name)); - let cached_program = if let (Ok(preprocessed_program), Ok(mut debug_artifact)) = ( - read_program_from_file(program_artifact_path), - read_debug_artifact_from_file(debug_artifact_path), - ) { - Some(CompiledProgram { - hash: preprocessed_program.hash, - circuit: preprocessed_program.bytecode, - abi: preprocessed_program.abi, - noir_version: preprocessed_program.noir_version, - debug: debug_artifact.debug_symbols.remove(0), - file_map: debug_artifact.file_map, - warnings: debug_artifact.warnings, - }) - } else { - None - }; - - let force_recompile = - cached_program.as_ref().map_or(false, |p| p.noir_version != NOIR_ARTIFACT_VERSION_STRING); - let (program, warnings) = match noirc_driver::compile_main( - &mut context, - crate_id, - compile_options, - cached_program, - force_recompile, - ) { - Ok(program_and_warnings) => program_and_warnings, - Err(errors) => { - return (context.file_manager, Err(errors)); - } - }; - - // Apply backend specific optimizations. 
- let optimized_program = nargo::ops::optimize_program(program, np_language, is_opcode_supported) - .expect("Backend does not support an opcode that is in the IR"); - let only_acir = compile_options.only_acir; - save_program(optimized_program.clone(), package, &workspace.target_directory_path(), only_acir); - - (context.file_manager, Ok((optimized_program, warnings))) -} - -fn compile_contract( - package: &Package, - compile_options: &CompileOptions, - np_language: Language, - is_opcode_supported: &impl Fn(&Opcode) -> bool, -) -> (FileManager, CompilationResult) { - let (mut context, crate_id) = prepare_package(package); - let (contract, warnings) = - match noirc_driver::compile_contract(&mut context, crate_id, compile_options) { - Ok(contracts_and_warnings) => contracts_and_warnings, - Err(errors) => { - return (context.file_manager, Err(errors)); - } - }; - - let optimized_contract = - nargo::ops::optimize_contract(contract, np_language, &is_opcode_supported) - .expect("Backend does not support an opcode that is in the IR"); - - (context.file_manager, Ok((optimized_contract, warnings))) -} - -fn save_program( +pub(super) fn save_program( program: CompiledProgram, package: &Package, circuit_dir: &Path, only_acir_opt: bool, ) { - let preprocessed_program = PreprocessedProgram { - hash: program.hash, - backend: String::from(BACKEND_IDENTIFIER), - abi: program.abi, - noir_version: program.noir_version, - bytecode: program.circuit, - }; + let program_artifact = ProgramArtifact::from(program.clone()); if only_acir_opt { - only_acir(&preprocessed_program, circuit_dir); + only_acir(&program_artifact, circuit_dir); } else { - save_program_to_file(&preprocessed_program, &package.name, circuit_dir); + save_program_to_file(&program_artifact, &package.name, circuit_dir); } - - let debug_artifact = DebugArtifact { - debug_symbols: vec![program.debug], - file_map: program.file_map, - warnings: program.warnings, - }; - let circuit_name: String = (&package.name).into(); - 
save_debug_artifact_to_file(&debug_artifact, &circuit_name, circuit_dir); } fn save_contract(contract: CompiledContract, package: &Package, circuit_dir: &Path) { - // TODO(#1389): I wonder if it is incorrect for nargo-core to know anything about contracts. - // As can be seen here, It seems like a leaky abstraction where ContractFunctions (essentially CompiledPrograms) - // are compiled via nargo-core and then the PreprocessedContract is constructed here. - // This is due to EACH function needing it's own CRS, PKey, and VKey from the backend. - let debug_artifact = DebugArtifact { - debug_symbols: contract.functions.iter().map(|function| function.debug.clone()).collect(), - file_map: contract.file_map, - warnings: contract.warnings, - }; - - let preprocessed_functions = vecmap(contract.functions, |func| PreprocessedContractFunction { - name: func.name, - function_type: func.function_type, - is_internal: func.is_internal, - abi: func.abi, - bytecode: func.bytecode, - }); - - let preprocessed_contract = PreprocessedContract { - noir_version: contract.noir_version, - name: contract.name, - backend: String::from(BACKEND_IDENTIFIER), - functions: preprocessed_functions, - events: contract.events, - }; - + let contract_name = contract.name.clone(); save_contract_to_file( - &preprocessed_contract, - &format!("{}-{}", package.name, preprocessed_contract.name), - circuit_dir, - ); - - save_debug_artifact_to_file( - &debug_artifact, - &format!("{}-{}", package.name, preprocessed_contract.name), + &contract.into(), + &format!("{}-{}", package.name, contract_name), circuit_dir, ); } diff --git a/tooling/nargo_cli/src/cli/dap_cmd.rs b/tooling/nargo_cli/src/cli/dap_cmd.rs index 55ae3601b5e..b7c009f0486 100644 --- a/tooling/nargo_cli/src/cli/dap_cmd.rs +++ b/tooling/nargo_cli/src/cli/dap_cmd.rs @@ -2,10 +2,14 @@ use acvm::acir::native_types::WitnessMap; use backend_interface::Backend; use clap::Args; use nargo::constants::PROVER_INPUT_FILE; +use 
nargo::ops::compile_program_with_debug_state; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; use std::io::{BufReader, BufWriter, Read, Write}; @@ -17,7 +21,8 @@ use dap::server::Server; use dap::types::Capabilities; use serde_json::Value; -use super::compile_cmd::compile_bin_package; +use super::compile_cmd::report_errors; +use super::debug_cmd::instrument_package_files; use super::fs::inputs::read_inputs_from_file; use crate::errors::CliError; @@ -86,24 +91,41 @@ fn load_and_compile_project( ) -> Result<(CompiledProgram, WitnessMap), LoadError> { let workspace = find_workspace(project_folder, package) .ok_or(LoadError::Generic(workspace_not_found_error_msg(project_folder, package)))?; - let (np_language, opcode_support) = backend - .get_backend_info() - .map_err(|_| LoadError::Generic("Failed to get backend info".into()))?; + let expression_width = backend + .get_backend_info() + .map_err(|_| LoadError::Generic("Failed to get backend info".into()))?; let package = workspace .into_iter() .find(|p| p.is_binary()) .ok_or(LoadError::Generic("No matching binary packages found in workspace".into()))?; - let compiled_program = compile_bin_package( - &workspace, + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let mut parsed_files = parse_all(&workspace_file_manager); + + let compile_options = CompileOptions { + instrument_debug: !skip_instrumentation, + force_brillig: !generate_acir, + ..CompileOptions::default() + }; + + let 
debug_state = instrument_package_files(&mut parsed_files, &workspace_file_manager, package); + + let compilation_result = compile_program_with_debug_state( + &workspace_file_manager, + &parsed_files, package, - &CompileOptions { - instrument_debug: !skip_instrumentation, - force_brillig: !generate_acir, - ..CompileOptions::default() - }, - np_language, - &opcode_support, + &compile_options, + expression_width, + None, + debug_state, + ); + + let compiled_program = report_errors( + compilation_result, + &workspace_file_manager, + compile_options.deny_warnings, + compile_options.silence_warnings, ) .map_err(|_| LoadError::Generic("Failed to compile project".into()))?; @@ -180,9 +202,7 @@ fn loop_uninitialized_dap( Ok((compiled_program, initial_witness)) => { server.respond(req.ack()?)?; - #[allow(deprecated)] - let blackbox_solver = - barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = bn254_blackbox_solver::Bn254BlackBoxSolver::new(); noir_debugger::run_dap_loop( server, diff --git a/tooling/nargo_cli/src/cli/debug_cmd.rs b/tooling/nargo_cli/src/cli/debug_cmd.rs index 3dac1088b28..e1fe9b673da 100644 --- a/tooling/nargo_cli/src/cli/debug_cmd.rs +++ b/tooling/nargo_cli/src/cli/debug_cmd.rs @@ -1,18 +1,26 @@ use std::path::PathBuf; use acvm::acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; +use fm::FileManager; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; +use nargo::ops::compile_program_with_debug_state; use nargo::package::Package; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, 
NOIR_ARTIFACT_VERSION_STRING, +}; +use noirc_frontend::debug::DebugState; use noirc_frontend::graph::CrateName; +use noirc_frontend::hir::ParsedFiles; -use super::compile_cmd::compile_bin_package; +use super::compile_cmd::report_errors; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; use crate::backends::Backend; @@ -53,7 +61,11 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; let target_dir = &workspace.target_directory_path(); - let (np_language, opcode_support) = backend.get_backend_info()?; + let expression_width = backend.get_backend_info()?; + + let mut workspace_file_manager = file_manager_with_stdlib(std::path::Path::new("")); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let mut parsed_files = parse_all(&workspace_file_manager); let Some(package) = workspace.into_iter().find(|p| p.is_binary()) else { println!( @@ -62,17 +74,58 @@ pub(crate) fn run( return Ok(()); }; - let compiled_program = compile_bin_package( - &workspace, + let debug_state = instrument_package_files(&mut parsed_files, &workspace_file_manager, package); + + let compilation_result = compile_program_with_debug_state( + &workspace_file_manager, + &parsed_files, package, &args.compile_options, - np_language, - &opcode_support, + expression_width, + None, + debug_state, + ); + + let compiled_program = report_errors( + compilation_result, + &workspace_file_manager, + args.compile_options.deny_warnings, + args.compile_options.silence_warnings, )?; run_async(package, compiled_program, &args.prover_name, &args.witness_name, target_dir) } +/// Add debugging instrumentation to all parsed files belonging to the package +/// being compiled +pub(crate) fn instrument_package_files( + parsed_files: &mut ParsedFiles, + file_manager: &FileManager, + package: &Package, +) -> DebugState { + // Start off at the entry path and read all files in the parent directory. 
+ let entry_path_parent = package + .entry_path + .parent() + .unwrap_or_else(|| panic!("The entry path is expected to be a single file within a directory and so should have a parent {:?}", package.entry_path)) + .clone(); + + let mut debug_state = DebugState::default(); + + for (file_id, parsed_file) in parsed_files.iter_mut() { + let file_path = + file_manager.path(*file_id).expect("Parsed file ID not found in file manager"); + for ancestor in file_path.ancestors() { + if ancestor == entry_path_parent { + // file is in package + debug_state.insert_symbols(&mut parsed_file.0); + } + } + } + + debug_state +} + fn run_async( package: &Package, program: CompiledProgram, @@ -132,8 +185,7 @@ pub(crate) fn debug_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, ) -> Result, CliError> { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let initial_witness = compiled_program.abi.encode(inputs_map, None)?; diff --git a/tooling/nargo_cli/src/cli/execute_cmd.rs b/tooling/nargo_cli/src/cli/execute_cmd.rs index 2f69b4c7df7..a84b2821f1e 100644 --- a/tooling/nargo_cli/src/cli/execute_cmd.rs +++ b/tooling/nargo_cli/src/cli/execute_cmd.rs @@ -1,21 +1,25 @@ use acvm::acir::native_types::WitnessMap; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use nargo::artifacts::debug::DebugArtifact; use nargo::constants::PROVER_INPUT_FILE; use nargo::errors::try_to_diagnose_runtime_error; -use nargo::ops::DefaultForeignCallExecutor; +use nargo::ops::{compile_program, DefaultForeignCallExecutor}; use nargo::package::Package; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::{Format, InputValue}; use noirc_abi::InputMap; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use 
noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; -use super::compile_cmd::compile_bin_package; use super::fs::{inputs::read_inputs_from_file, witness::save_witness_to_dir}; use super::NargoConfig; use crate::backends::Backend; +use crate::cli::compile_cmd::report_errors; use crate::errors::CliError; /// Executes a circuit to calculate its return value @@ -38,6 +42,10 @@ pub(crate) struct ExecuteCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -56,18 +64,35 @@ pub(crate) fn run( )?; let target_dir = &workspace.target_directory_path(); - let (np_language, opcode_support) = backend.get_backend_info_or_default(); - for package in &workspace { - let compiled_program = compile_bin_package( - &workspace, + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + + let expression_width = backend.get_backend_info_or_default(); + let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); + for package in binary_packages { + let compilation_result = compile_program( + &workspace_file_manager, + &parsed_files, package, &args.compile_options, - np_language, - &opcode_support, + expression_width, + None, + ); + + let compiled_program = report_errors( + compilation_result, + &workspace_file_manager, + args.compile_options.deny_warnings, + args.compile_options.silence_warnings, )?; - let (return_value, solved_witness) = - execute_program_and_decode(compiled_program, package, &args.prover_name)?; + let (return_value, solved_witness) = execute_program_and_decode( + compiled_program, + package, + &args.prover_name, + args.oracle_resolver.as_deref(), + )?; println!("[{}] 
Circuit witness successfully solved", package.name); if let Some(return_value) = return_value { @@ -86,11 +111,12 @@ fn execute_program_and_decode( program: CompiledProgram, package: &Package, prover_name: &str, + foreign_call_resolver_url: Option<&str>, ) -> Result<(Option, WitnessMap), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &program.abi)?; - let solved_witness = execute_program(&program, &inputs_map)?; + let solved_witness = execute_program(&program, &inputs_map, foreign_call_resolver_url)?; let public_abi = program.abi.public_abi(); let (_, return_value) = public_abi.decode(&solved_witness)?; @@ -100,9 +126,9 @@ fn execute_program_and_decode( pub(crate) fn execute_program( compiled_program: &CompiledProgram, inputs_map: &InputMap, + foreign_call_resolver_url: Option<&str>, ) -> Result { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let initial_witness = compiled_program.abi.encode(inputs_map, None)?; @@ -110,7 +136,7 @@ pub(crate) fn execute_program( &compiled_program.circuit, initial_witness, &blackbox_solver, - &mut DefaultForeignCallExecutor::new(true), + &mut DefaultForeignCallExecutor::new(true, foreign_call_resolver_url), ); match solved_witness_err { Ok(solved_witness) => Ok(solved_witness), diff --git a/tooling/nargo_cli/src/cli/export_cmd.rs b/tooling/nargo_cli/src/cli/export_cmd.rs new file mode 100644 index 00000000000..96b24796a2b --- /dev/null +++ b/tooling/nargo_cli/src/cli/export_cmd.rs @@ -0,0 +1,124 @@ +use nargo::errors::CompileError; +use noirc_errors::FileDiagnostic; +use noirc_frontend::hir::ParsedFiles; +use rayon::prelude::*; + +use fm::FileManager; +use iter_extended::try_vecmap; +use nargo::package::Package; +use nargo::prepare_package; +use nargo::workspace::Workspace; +use 
nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; +use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; +use noirc_driver::{ + compile_no_check, file_manager_with_stdlib, CompileOptions, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, +}; + +use noirc_frontend::graph::CrateName; + +use clap::Args; + +use crate::backends::Backend; +use crate::errors::CliError; + +use super::check_cmd::check_crate_and_report_errors; + +use super::compile_cmd::report_errors; +use super::fs::program::save_program_to_file; +use super::NargoConfig; + +/// Exports functions marked with #[export] attribute +#[derive(Debug, Clone, Args)] +pub(crate) struct ExportCommand { + /// The name of the package to compile + #[clap(long, conflicts_with = "workspace")] + package: Option, + + /// Compile all packages in the workspace + #[clap(long, conflicts_with = "package")] + workspace: bool, + + #[clap(flatten)] + compile_options: CompileOptions, +} + +pub(crate) fn run( + _backend: &Backend, + args: ExportCommand, + config: NargoConfig, +) -> Result<(), CliError> { + let toml_path = get_package_manifest(&config.program_dir)?; + let default_selection = + if args.workspace { PackageSelection::All } else { PackageSelection::DefaultOrAll }; + let selection = args.package.map_or(default_selection, PackageSelection::Selected); + + let workspace = resolve_workspace_from_toml( + &toml_path, + selection, + Some(NOIR_ARTIFACT_VERSION_STRING.to_owned()), + )?; + + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + + let library_packages: Vec<_> = + workspace.into_iter().filter(|package| package.is_library()).collect(); + + library_packages + .par_iter() + .map(|package| { + compile_exported_functions( + &workspace_file_manager, + &parsed_files, + &workspace, + package, + 
&args.compile_options, + ) + }) + .collect() +} + +fn compile_exported_functions( + file_manager: &FileManager, + parsed_files: &ParsedFiles, + workspace: &Workspace, + package: &Package, + compile_options: &CompileOptions, +) -> Result<(), CliError> { + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); + check_crate_and_report_errors( + &mut context, + crate_id, + compile_options.deny_warnings, + compile_options.disable_macros, + compile_options.silence_warnings, + )?; + + let exported_functions = context.get_all_exported_functions_in_crate(&crate_id); + + let exported_programs = try_vecmap( + exported_functions, + |(function_name, function_id)| -> Result<(String, CompiledProgram), CompileError> { + // TODO: We should to refactor how to deal with compilation errors to avoid this. + let program = compile_no_check(&mut context, compile_options, function_id, None, false) + .map_err(|error| vec![FileDiagnostic::from(error)]); + + let program = report_errors( + program.map(|program| (program, Vec::new())), + file_manager, + compile_options.deny_warnings, + compile_options.silence_warnings, + )?; + + Ok((function_name, program)) + }, + )?; + + let export_dir = workspace.export_directory_path(); + for (function_name, program) in exported_programs { + save_program_to_file(&program.into(), &function_name.parse().unwrap(), &export_dir); + } + Ok(()) +} diff --git a/tooling/nargo_cli/src/cli/fmt_cmd.rs b/tooling/nargo_cli/src/cli/fmt_cmd.rs index 0c2ca71eba3..0bd25a3a0ce 100644 --- a/tooling/nargo_cli/src/cli/fmt_cmd.rs +++ b/tooling/nargo_cli/src/cli/fmt_cmd.rs @@ -1,10 +1,9 @@ use std::{fs::DirEntry, path::Path}; use clap::Args; -use fm::FileManager; -use nargo::insert_all_files_for_package_into_file_manager; +use nargo::insert_all_files_for_workspace_into_file_manager; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::NOIR_ARTIFACT_VERSION_STRING; +use 
noirc_driver::{file_manager_with_stdlib, NOIR_ARTIFACT_VERSION_STRING}; use noirc_errors::CustomDiagnostic; use noirc_frontend::{hir::def_map::parse_file, parser::ParserError}; @@ -30,18 +29,19 @@ pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliErr Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let config = nargo_fmt::Config::read(&config.program_dir) .map_err(|err| CliError::Generic(err.to_string()))?; let mut check_exit_code_one = false; for package in &workspace { - let mut file_manager = FileManager::new(&package.root_dir); - insert_all_files_for_package_into_file_manager(package, &mut file_manager); - visit_noir_files(&package.root_dir.join("src"), &mut |entry| { - let file_id = file_manager.name_to_id(entry.path().to_path_buf()).expect("The file should exist since we added all files in the package into the file manager"); - let (parsed_module, errors) = parse_file(&file_manager, file_id); + let file_id = workspace_file_manager.name_to_id(entry.path().to_path_buf()).expect("The file should exist since we added all files in the package into the file manager"); + + let (parsed_module, errors) = parse_file(&workspace_file_manager, file_id); let is_all_warnings = errors.iter().all(ParserError::is_warning); if !is_all_warnings { @@ -55,14 +55,14 @@ pub(crate) fn run(args: FormatCommand, config: NargoConfig) -> Result<(), CliErr let _ = super::compile_cmd::report_errors::<()>( Err(errors), - &file_manager, + &workspace_file_manager, false, false, ); return Ok(()); } - let original = file_manager.fetch_file(file_id).source(); + let original = workspace_file_manager.fetch_file(file_id).expect("The file should exist since we added all files in the package into the file manager"); let formatted = nargo_fmt::format(original, parsed_module, &config); if check_mode 
{ diff --git a/tooling/nargo_cli/src/cli/fs/program.rs b/tooling/nargo_cli/src/cli/fs/program.rs index 807df25ba48..1fb57ae6685 100644 --- a/tooling/nargo_cli/src/cli/fs/program.rs +++ b/tooling/nargo_cli/src/cli/fs/program.rs @@ -1,9 +1,7 @@ use std::path::{Path, PathBuf}; use acvm::acir::circuit::Circuit; -use nargo::artifacts::{ - contract::PreprocessedContract, debug::DebugArtifact, program::PreprocessedProgram, -}; +use nargo::artifacts::{contract::ContractArtifact, program::ProgramArtifact}; use noirc_frontend::graph::CrateName; use crate::errors::FilesystemError; @@ -11,44 +9,35 @@ use crate::errors::FilesystemError; use super::{create_named_dir, write_to_file}; pub(crate) fn save_program_to_file>( - compiled_program: &PreprocessedProgram, + program_artifact: &ProgramArtifact, crate_name: &CrateName, circuit_dir: P, ) -> PathBuf { let circuit_name: String = crate_name.into(); - save_build_artifact_to_file(compiled_program, &circuit_name, circuit_dir) + save_build_artifact_to_file(program_artifact, &circuit_name, circuit_dir) } /// Writes the bytecode as acir.gz pub(crate) fn only_acir>( - compiled_program: &PreprocessedProgram, + program_artifact: &ProgramArtifact, circuit_dir: P, ) -> PathBuf { create_named_dir(circuit_dir.as_ref(), "target"); let circuit_path = circuit_dir.as_ref().join("acir").with_extension("gz"); - let bytes = Circuit::serialize_circuit(&compiled_program.bytecode); + let bytes = Circuit::serialize_circuit(&program_artifact.bytecode); write_to_file(&bytes, &circuit_path); circuit_path } pub(crate) fn save_contract_to_file>( - compiled_contract: &PreprocessedContract, + compiled_contract: &ContractArtifact, circuit_name: &str, circuit_dir: P, ) -> PathBuf { save_build_artifact_to_file(compiled_contract, circuit_name, circuit_dir) } -pub(crate) fn save_debug_artifact_to_file>( - debug_artifact: &DebugArtifact, - circuit_name: &str, - circuit_dir: P, -) -> PathBuf { - let artifact_name = format!("debug_{circuit_name}"); - 
save_build_artifact_to_file(debug_artifact, &artifact_name, circuit_dir) -} - fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( build_artifact: &T, artifact_name: &str, @@ -64,7 +53,7 @@ fn save_build_artifact_to_file, T: ?Sized + serde::Serialize>( pub(crate) fn read_program_from_file>( circuit_path: P, -) -> Result { +) -> Result { let file_path = circuit_path.as_ref().with_extension("json"); let input_string = @@ -74,14 +63,3 @@ pub(crate) fn read_program_from_file>( Ok(program) } - -pub(crate) fn read_debug_artifact_from_file>( - debug_artifact_path: P, -) -> Result { - let input_string = std::fs::read(&debug_artifact_path) - .map_err(|_| FilesystemError::PathNotValid(debug_artifact_path.as_ref().into()))?; - let program = serde_json::from_slice(&input_string) - .map_err(|err| FilesystemError::ProgramSerializationError(err.to_string()))?; - - Ok(program) -} diff --git a/tooling/nargo_cli/src/cli/info_cmd.rs b/tooling/nargo_cli/src/cli/info_cmd.rs index b0f771bfc1c..8dfff67b47f 100644 --- a/tooling/nargo_cli/src/cli/info_cmd.rs +++ b/tooling/nargo_cli/src/cli/info_cmd.rs @@ -1,13 +1,17 @@ use std::collections::HashMap; -use acvm::Language; +use acvm::ExpressionWidth; use backend_interface::BackendError; use clap::Args; use iter_extended::vecmap; -use nargo::{artifacts::debug::DebugArtifact, package::Package}; +use nargo::{ + artifacts::debug::DebugArtifact, insert_all_files_for_workspace_into_file_manager, + package::Package, parse_all, +}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_driver::{ - CompileOptions, CompiledContract, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, + file_manager_with_stdlib, CompileOptions, CompiledContract, CompiledProgram, + NOIR_ARTIFACT_VERSION_STRING, }; use noirc_errors::{debug_info::OpCodesCount, Location}; use noirc_frontend::graph::CrateName; @@ -61,19 +65,16 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (binary_packages, 
contract_packages): (Vec<_>, Vec<_>) = workspace - .into_iter() - .filter(|package| !package.is_library()) - .cloned() - .partition(|package| package.is_binary()); + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); - let (np_language, opcode_support) = backend.get_backend_info_or_default(); + let expression_width = backend.get_backend_info_or_default(); let (compiled_programs, compiled_contracts) = compile_workspace( + &workspace_file_manager, + &parsed_files, &workspace, - &binary_packages, - &contract_packages, - np_language, - &opcode_support, + expression_width, &args.compile_options, )?; @@ -94,17 +95,18 @@ pub(crate) fn run( } } + let binary_packages = + workspace.into_iter().filter(|package| package.is_binary()).zip(compiled_programs); let program_info = binary_packages - .into_par_iter() - .zip(compiled_programs) + .par_bridge() .map(|(package, program)| { - count_opcodes_and_gates_in_program(backend, program, &package, np_language) + count_opcodes_and_gates_in_program(backend, program, package, expression_width) }) .collect::>()?; let contract_info = compiled_contracts .into_par_iter() - .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, np_language)) + .map(|contract| count_opcodes_and_gates_in_contract(backend, contract, expression_width)) .collect::>()?; let info_report = InfoReport { programs: program_info, contracts: contract_info }; @@ -115,7 +117,7 @@ pub(crate) fn run( } else { // Otherwise print human-readable table. 
if !info_report.programs.is_empty() { - let mut program_table = table!([Fm->"Package", Fm->"Language", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size"]); + let mut program_table = table!([Fm->"Package", Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size"]); for program in info_report.programs { program_table.add_row(program.into()); @@ -126,7 +128,7 @@ pub(crate) fn run( let mut contract_table = table!([ Fm->"Contract", Fm->"Function", - Fm->"Language", + Fm->"Expression Width", Fm->"ACIR Opcodes", Fm->"Backend Circuit Size" ]); @@ -203,7 +205,7 @@ struct InfoReport { struct ProgramInfo { name: String, #[serde(skip)] - language: Language, + expression_width: ExpressionWidth, acir_opcodes: usize, circuit_size: u32, } @@ -212,7 +214,7 @@ impl From for Row { fn from(program_info: ProgramInfo) -> Self { row![ Fm->format!("{}", program_info.name), - format!("{:?}", program_info.language), + format!("{:?}", program_info.expression_width), Fc->format!("{}", program_info.acir_opcodes), Fc->format!("{}", program_info.circuit_size), ] @@ -223,7 +225,7 @@ impl From for Row { struct ContractInfo { name: String, #[serde(skip)] - language: Language, + expression_width: ExpressionWidth, functions: Vec, } @@ -240,7 +242,7 @@ impl From for Vec { row![ Fm->format!("{}", contract_info.name), Fc->format!("{}", function.name), - format!("{:?}", contract_info.language), + format!("{:?}", contract_info.expression_width), Fc->format!("{}", function.acir_opcodes), Fc->format!("{}", function.circuit_size), ] @@ -252,11 +254,11 @@ fn count_opcodes_and_gates_in_program( backend: &Backend, compiled_program: CompiledProgram, package: &Package, - language: Language, + expression_width: ExpressionWidth, ) -> Result { Ok(ProgramInfo { name: package.name.to_string(), - language, + expression_width, acir_opcodes: compiled_program.circuit.opcodes.len(), circuit_size: backend.get_exact_circuit_size(&compiled_program.circuit)?, }) @@ -265,7 +267,7 @@ fn 
count_opcodes_and_gates_in_program( fn count_opcodes_and_gates_in_contract( backend: &Backend, contract: CompiledContract, - language: Language, + expression_width: ExpressionWidth, ) -> Result { let functions = contract .functions @@ -279,5 +281,5 @@ fn count_opcodes_and_gates_in_contract( }) .collect::>()?; - Ok(ContractInfo { name: contract.name, language, functions }) + Ok(ContractInfo { name: contract.name, expression_width, functions }) } diff --git a/tooling/nargo_cli/src/cli/init_cmd.rs b/tooling/nargo_cli/src/cli/init_cmd.rs index e53c2e4cdc9..dd3af97ecd6 100644 --- a/tooling/nargo_cli/src/cli/init_cmd.rs +++ b/tooling/nargo_cli/src/cli/init_cmd.rs @@ -1,4 +1,3 @@ -use crate::backends::Backend; use crate::errors::CliError; use super::fs::{create_named_dir, write_to_file}; @@ -34,12 +33,7 @@ const BIN_EXAMPLE: &str = include_str!("./noir_template_files/binary.nr"); const CONTRACT_EXAMPLE: &str = include_str!("./noir_template_files/contract.nr"); const LIB_EXAMPLE: &str = include_str!("./noir_template_files/library.nr"); -pub(crate) fn run( - // Backend is currently unused, but we might want to use it to inform the "new" template in the future - _backend: &Backend, - args: InitCommand, - config: NargoConfig, -) -> Result<(), CliError> { +pub(crate) fn run(args: InitCommand, config: NargoConfig) -> Result<(), CliError> { let package_name = match args.name { Some(name) => name, None => { diff --git a/tooling/nargo_cli/src/cli/lsp_cmd.rs b/tooling/nargo_cli/src/cli/lsp_cmd.rs index a41bb877991..1428b8070c8 100644 --- a/tooling/nargo_cli/src/cli/lsp_cmd.rs +++ b/tooling/nargo_cli/src/cli/lsp_cmd.rs @@ -1,7 +1,8 @@ use async_lsp::{ - client_monitor::ClientProcessMonitorLayer, concurrency::ConcurrencyLayer, - panic::CatchUnwindLayer, server::LifecycleLayer, tracing::TracingLayer, + concurrency::ConcurrencyLayer, panic::CatchUnwindLayer, server::LifecycleLayer, + tracing::TracingLayer, }; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; use 
noir_lsp::NargoLspService; use tower::ServiceBuilder; @@ -30,8 +31,7 @@ pub(crate) fn run( runtime.block_on(async { let (server, _) = async_lsp::MainLoop::new_server(|client| { - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); + let blackbox_solver = Bn254BlackBoxSolver::new(); let router = NargoLspService::new(&client, blackbox_solver); ServiceBuilder::new() @@ -39,10 +39,11 @@ pub(crate) fn run( .layer(LifecycleLayer::default()) .layer(CatchUnwindLayer::default()) .layer(ConcurrencyLayer::default()) - .layer(ClientProcessMonitorLayer::new(client)) .service(router) }); + eprintln!("LSP starting..."); + // Prefer truly asynchronous piped stdin/stdout without blocking tasks. #[cfg(unix)] let (stdin, stdout) = ( diff --git a/tooling/nargo_cli/src/cli/mod.rs b/tooling/nargo_cli/src/cli/mod.rs index a4a38ff7851..01adbe9da98 100644 --- a/tooling/nargo_cli/src/cli/mod.rs +++ b/tooling/nargo_cli/src/cli/mod.rs @@ -17,6 +17,7 @@ mod compile_cmd; mod dap_cmd; mod debug_cmd; mod execute_cmd; +mod export_cmd; mod fmt_cmd; mod info_cmd; mod init_cmd; @@ -69,6 +70,8 @@ enum NargoCommand { Init(init_cmd::InitCommand), Execute(execute_cmd::ExecuteCommand), #[command(hide = true)] // Hidden while the feature is being built out + Export(export_cmd::ExportCommand), + #[command(hide = true)] // Hidden while the feature is being built out Debug(debug_cmd::DebugCommand), Prove(prove_cmd::ProveCommand), Verify(verify_cmd::VerifyCommand), @@ -104,11 +107,12 @@ pub(crate) fn start_cli() -> eyre::Result<()> { match command { NargoCommand::New(args) => new_cmd::run(&backend, args, config), - NargoCommand::Init(args) => init_cmd::run(&backend, args, config), + NargoCommand::Init(args) => init_cmd::run(args, config), NargoCommand::Check(args) => check_cmd::run(&backend, args, config), NargoCommand::Compile(args) => compile_cmd::run(&backend, args, config), NargoCommand::Debug(args) => debug_cmd::run(&backend, args, config), 
NargoCommand::Execute(args) => execute_cmd::run(&backend, args, config), + NargoCommand::Export(args) => export_cmd::run(&backend, args, config), NargoCommand::Prove(args) => prove_cmd::run(&backend, args, config), NargoCommand::Verify(args) => verify_cmd::run(&backend, args, config), NargoCommand::Test(args) => test_cmd::run(&backend, args, config), diff --git a/tooling/nargo_cli/src/cli/prove_cmd.rs b/tooling/nargo_cli/src/cli/prove_cmd.rs index 54b148ec3a2..a79c21c81c9 100644 --- a/tooling/nargo_cli/src/cli/prove_cmd.rs +++ b/tooling/nargo_cli/src/cli/prove_cmd.rs @@ -1,13 +1,17 @@ use clap::Args; use nargo::constants::{PROVER_INPUT_FILE, VERIFIER_INPUT_FILE}; +use nargo::ops::compile_program; use nargo::package::Package; use nargo::workspace::Workspace; +use nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; -use super::compile_cmd::compile_bin_package; +use super::compile_cmd::report_errors; use super::fs::{ inputs::{read_inputs_from_file, write_inputs_to_file}, proof::save_proof_to_dir, @@ -40,6 +44,10 @@ pub(crate) struct ProveCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -57,30 +65,45 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (np_language, opcode_support) = backend.get_backend_info()?; - for package in &workspace { - let program = compile_bin_package( - &workspace, + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut 
workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + + let expression_width = backend.get_backend_info()?; + let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); + for package in binary_packages { + let compilation_result = compile_program( + &workspace_file_manager, + &parsed_files, package, &args.compile_options, - np_language, - &opcode_support, + expression_width, + None, + ); + + let compiled_program = report_errors( + compilation_result, + &workspace_file_manager, + args.compile_options.deny_warnings, + args.compile_options.silence_warnings, )?; prove_package( backend, &workspace, package, - program, + compiled_program, &args.prover_name, &args.verifier_name, args.verify, + args.oracle_resolver.as_deref(), )?; } Ok(()) } +#[allow(clippy::too_many_arguments)] pub(crate) fn prove_package( backend: &Backend, workspace: &Workspace, @@ -89,12 +112,14 @@ pub(crate) fn prove_package( prover_name: &str, verifier_name: &str, check_proof: bool, + foreign_call_resolver_url: Option<&str>, ) -> Result<(), CliError> { // Parse the initial witness values from Prover.toml let (inputs_map, _) = read_inputs_from_file(&package.root_dir, prover_name, Format::Toml, &compiled_program.abi)?; - let solved_witness = execute_program(&compiled_program, &inputs_map)?; + let solved_witness = + execute_program(&compiled_program, &inputs_map, foreign_call_resolver_url)?; // Write public inputs into Verifier.toml let public_abi = compiled_program.abi.public_abi(); diff --git a/tooling/nargo_cli/src/cli/test_cmd.rs b/tooling/nargo_cli/src/cli/test_cmd.rs index a31b9e7d9b9..503fd5afdd4 100644 --- a/tooling/nargo_cli/src/cli/test_cmd.rs +++ b/tooling/nargo_cli/src/cli/test_cmd.rs @@ -1,15 +1,21 @@ use std::io::Write; use acvm::BlackBoxFunctionSolver; +use bn254_blackbox_solver::Bn254BlackBoxSolver; use clap::Args; +use fm::FileManager; use nargo::{ + insert_all_files_for_workspace_into_file_manager, ops::{run_test, TestStatus}, 
package::Package, - prepare_package, + parse_all, prepare_package, }; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; -use noirc_driver::{CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; -use noirc_frontend::{graph::CrateName, hir::FunctionNameMatch}; +use noirc_driver::{file_manager_with_stdlib, CompileOptions, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_frontend::{ + graph::CrateName, + hir::{FunctionNameMatch, ParsedFiles}, +}; use termcolor::{Color, ColorChoice, ColorSpec, StandardStream, WriteColor}; use crate::{backends::Backend, cli::check_cmd::check_crate_and_report_errors, errors::CliError}; @@ -40,6 +46,10 @@ pub(crate) struct TestCommand { #[clap(flatten)] compile_options: CompileOptions, + + /// JSON RPC url to solve oracle calls + #[clap(long)] + oracle_resolver: Option, } pub(crate) fn run( @@ -57,6 +67,10 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + let pattern = match &args.test_name { Some(name) => { if args.exact { @@ -68,25 +82,59 @@ pub(crate) fn run( None => FunctionNameMatch::Anything, }; - #[allow(deprecated)] - let blackbox_solver = barretenberg_blackbox_solver::BarretenbergSolver::new(); - for package in &workspace { - // By unwrapping here with `?`, we stop the test runner upon a package failing - // TODO: We should run the whole suite even if there are failures in a package - run_tests(&blackbox_solver, package, pattern, args.show_output, &args.compile_options)?; + let blackbox_solver = Bn254BlackBoxSolver::new(); + + let test_reports: Vec> = workspace + .into_iter() + .map(|package| { + run_tests( + &workspace_file_manager, + &parsed_files, + &blackbox_solver, + package, + pattern, + args.show_output, + args.oracle_resolver.as_deref(), + 
&args.compile_options, + ) + }) + .collect::>()?; + let test_report: Vec<(String, TestStatus)> = test_reports.into_iter().flatten().collect(); + + if test_report.is_empty() { + match &pattern { + FunctionNameMatch::Exact(pattern) => { + return Err(CliError::Generic( + format!("Found 0 tests matching input '{pattern}'.",), + )) + } + FunctionNameMatch::Contains(pattern) => { + return Err(CliError::Generic(format!("Found 0 tests containing '{pattern}'.",))) + } + // If we are running all tests in a crate, having none is not an error + FunctionNameMatch::Anything => {} + }; } - Ok(()) + if test_report.iter().any(|(_, status)| !matches!(status, TestStatus::Fail { .. })) { + Ok(()) + } else { + Err(CliError::Generic(String::new())) + } } +#[allow(clippy::too_many_arguments)] fn run_tests( + file_manager: &FileManager, + parsed_files: &ParsedFiles, blackbox_solver: &S, package: &Package, fn_name: FunctionNameMatch, show_output: bool, + foreign_call_resolver_url: Option<&str>, compile_options: &CompileOptions, -) -> Result<(), CliError> { - let (mut context, crate_id) = prepare_package(package); +) -> Result, CliError> { + let (mut context, crate_id) = prepare_package(file_manager, parsed_files, package); check_crate_and_report_errors( &mut context, crate_id, @@ -97,77 +145,73 @@ fn run_tests( let test_functions = context.get_all_test_functions_in_crate_matching(&crate_id, fn_name); let count_all = test_functions.len(); - if count_all == 0 { - return match &fn_name { - FunctionNameMatch::Anything => { - Err(CliError::Generic(format!("[{}] Found 0 tests.", package.name))) - } - FunctionNameMatch::Exact(pattern) => Err(CliError::Generic(format!( - "[{}] Found 0 tests matching input '{pattern}'.", - package.name - ))), - FunctionNameMatch::Contains(pattern) => Err(CliError::Generic(format!( - "[{}] Found 0 tests containing '{pattern}'.", - package.name - ))), - }; - } let plural = if count_all == 1 { "" } else { "s" }; println!("[{}] Running {count_all} test 
function{plural}", package.name); - let mut count_failed = 0; let writer = StandardStream::stderr(ColorChoice::Always); let mut writer = writer.lock(); + let mut test_report: Vec<(String, TestStatus)> = Vec::new(); for (test_name, test_function) in test_functions { write!(writer, "[{}] Testing {test_name}... ", package.name) - .expect("Failed to write to stdout"); + .expect("Failed to write to stderr"); writer.flush().expect("Failed to flush writer"); - match run_test(blackbox_solver, &mut context, test_function, show_output, compile_options) { + let test_status = run_test( + blackbox_solver, + &mut context, + test_function, + show_output, + foreign_call_resolver_url, + compile_options, + ); + + match &test_status { TestStatus::Pass { .. } => { writer .set_color(ColorSpec::new().set_fg(Some(Color::Green))) .expect("Failed to set color"); - writeln!(writer, "ok").expect("Failed to write to stdout"); + writeln!(writer, "ok").expect("Failed to write to stderr"); } TestStatus::Fail { message, error_diagnostic } => { writer .set_color(ColorSpec::new().set_fg(Some(Color::Red))) .expect("Failed to set color"); - writeln!(writer, "{message}\n").expect("Failed to write to stdout"); + writeln!(writer, "FAIL\n{message}\n").expect("Failed to write to stderr"); if let Some(diag) = error_diagnostic { noirc_errors::reporter::report_all( context.file_manager.as_file_map(), - &[diag], + &[diag.clone()], compile_options.deny_warnings, compile_options.silence_warnings, ); } - count_failed += 1; } TestStatus::CompileError(err) => { noirc_errors::reporter::report_all( context.file_manager.as_file_map(), - &[err], + &[err.clone()], compile_options.deny_warnings, compile_options.silence_warnings, ); - count_failed += 1; } } + + test_report.push((test_name, test_status)); + writer.reset().expect("Failed to reset writer"); } - write!(writer, "[{}] ", package.name).expect("Failed to write to stdout"); + write!(writer, "[{}] ", package.name).expect("Failed to write to stderr"); + let 
count_failed = + test_report.iter().filter(|(_, status)| !matches!(status, TestStatus::Pass)).count(); if count_failed == 0 { writer.set_color(ColorSpec::new().set_fg(Some(Color::Green))).expect("Failed to set color"); - writeln!(writer, "{count_all} test{plural} passed").expect("Failed to write to stdout"); + write!(writer, "{count_all} test{plural} passed").expect("Failed to write to stderr"); writer.reset().expect("Failed to reset writer"); - - Ok(()) + writeln!(writer).expect("Failed to write to stderr"); } else { let count_passed = count_all - count_failed; let plural_failed = if count_failed == 1 { "" } else { "s" }; @@ -178,13 +222,14 @@ fn run_tests( .set_color(ColorSpec::new().set_fg(Some(Color::Green))) .expect("Failed to set color"); write!(writer, "{count_passed} test{plural_passed} passed, ",) - .expect("Failed to write to stdout"); + .expect("Failed to write to stderr"); } + writer.set_color(ColorSpec::new().set_fg(Some(Color::Red))).expect("Failed to set color"); writeln!(writer, "{count_failed} test{plural_failed} failed") - .expect("Failed to write to stdout"); + .expect("Failed to write to stderr"); writer.reset().expect("Failed to reset writer"); - - Err(CliError::Generic(String::new())) } + + Ok(test_report) } diff --git a/tooling/nargo_cli/src/cli/verify_cmd.rs b/tooling/nargo_cli/src/cli/verify_cmd.rs index 2f8a6efbba4..daf623c10c6 100644 --- a/tooling/nargo_cli/src/cli/verify_cmd.rs +++ b/tooling/nargo_cli/src/cli/verify_cmd.rs @@ -1,17 +1,19 @@ +use super::compile_cmd::report_errors; +use super::fs::{inputs::read_inputs_from_file, load_hex_data}; use super::NargoConfig; -use super::{ - compile_cmd::compile_bin_package, - fs::{inputs::read_inputs_from_file, load_hex_data}, -}; use crate::{backends::Backend, errors::CliError}; use clap::Args; use nargo::constants::{PROOF_EXT, VERIFIER_INPUT_FILE}; +use nargo::ops::compile_program; use nargo::package::Package; use nargo::workspace::Workspace; +use 
nargo::{insert_all_files_for_workspace_into_file_manager, parse_all}; use nargo_toml::{get_package_manifest, resolve_workspace_from_toml, PackageSelection}; use noirc_abi::input_parser::Format; -use noirc_driver::{CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING}; +use noirc_driver::{ + file_manager_with_stdlib, CompileOptions, CompiledProgram, NOIR_ARTIFACT_VERSION_STRING, +}; use noirc_frontend::graph::CrateName; /// Given a proof and a program, verify whether the proof is valid @@ -48,17 +50,30 @@ pub(crate) fn run( Some(NOIR_ARTIFACT_VERSION_STRING.to_string()), )?; - let (np_language, opcode_support) = backend.get_backend_info()?; - for package in &workspace { - let program = compile_bin_package( - &workspace, + let mut workspace_file_manager = file_manager_with_stdlib(&workspace.root_dir); + insert_all_files_for_workspace_into_file_manager(&workspace, &mut workspace_file_manager); + let parsed_files = parse_all(&workspace_file_manager); + + let expression_width = backend.get_backend_info()?; + let binary_packages = workspace.into_iter().filter(|package| package.is_binary()); + for package in binary_packages { + let compilation_result = compile_program( + &workspace_file_manager, + &parsed_files, package, &args.compile_options, - np_language, - &opcode_support, + expression_width, + None, + ); + + let compiled_program = report_errors( + compilation_result, + &workspace_file_manager, + args.compile_options.deny_warnings, + args.compile_options.silence_warnings, )?; - verify_package(backend, &workspace, package, program, &args.verifier_name)?; + verify_package(backend, &workspace, package, compiled_program, &args.verifier_name)?; } Ok(()) diff --git a/tooling/nargo_cli/src/main.rs b/tooling/nargo_cli/src/main.rs index 7eeca2ab2b0..3f797b0bf0c 100644 --- a/tooling/nargo_cli/src/main.rs +++ b/tooling/nargo_cli/src/main.rs @@ -14,22 +14,27 @@ mod errors; use std::env; use color_eyre::config::HookBuilder; -use env_logger::{Builder, Env}; + use 
tracing_appender::rolling; +use tracing_subscriber::{fmt::format::FmtSpan, EnvFilter}; const PANIC_MESSAGE: &str = "This is a bug. We may have already fixed this in newer versions of Nargo so try searching for similar issues at https://github.com/noir-lang/noir/issues/.\nIf there isn't an open issue for this bug, consider opening one at https://github.com/noir-lang/noir/issues/new?labels=bug&template=bug_report.yml"; fn main() { - let env = Env::default().filter_or("NOIR_LOG", "error"); // Default to 'error' if NOIR_LOG is not set - Builder::from_env(env).init(); - // Setup tracing if let Ok(log_dir) = env::var("NARGO_LOG_DIR") { let debug_file = rolling::daily(log_dir, "nargo-log"); tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) .with_writer(debug_file) .with_ansi(false) - .with_max_level(tracing::Level::TRACE) + .with_env_filter(EnvFilter::from_default_env()) + .init(); + } else { + tracing_subscriber::fmt() + .with_span_events(FmtSpan::ACTIVE) + .with_ansi(true) + .with_env_filter(EnvFilter::from_env("NOIR_LOG")) .init(); } diff --git a/tooling/nargo_cli/tests/hello_world.rs b/tooling/nargo_cli/tests/hello_world.rs index bc7022d1567..9fcb0c873e1 100644 --- a/tooling/nargo_cli/tests/hello_world.rs +++ b/tooling/nargo_cli/tests/hello_world.rs @@ -1,5 +1,5 @@ //! This integration test aims to mirror the steps taken by a new user using Nargo for the first time. -//! It then follows the steps published at https://noir-lang.org/getting_started/hello_world.html +//! It then follows the steps published at https://noir-lang.org/docs/getting_started/create_a_project //! Any modifications to the commands run here MUST be documented in the noir-lang book. 
use assert_cmd::prelude::*; diff --git a/tooling/nargo_fmt/tests/expected/contract.nr b/tooling/nargo_fmt/tests/expected/contract.nr index d288b1af7eb..2e3f4d7c8c4 100644 --- a/tooling/nargo_fmt/tests/expected/contract.nr +++ b/tooling/nargo_fmt/tests/expected/contract.nr @@ -3,6 +3,8 @@ // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { + use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; + use dep::value_note::{ utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, @@ -11,7 +13,6 @@ contract Benchmarking { use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - selector::compute_selector, log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, @@ -19,8 +20,8 @@ contract Benchmarking { }; struct Storage { - notes: Map>, - balances: Map>, + notes: Map>, + balances: Map>, } impl Storage { @@ -59,7 +60,7 @@ contract Benchmarking { storage.balances.at(owner).write(current + value); let _callStackItem1 = context.call_public_function( context.this_address(), - compute_selector("broadcast(Field)"), + FunctionSelector::from_signature("broadcast(Field)"), [owner] ); } @@ -71,7 +72,7 @@ contract Benchmarking { } unconstrained fn compute_note_hash_and_nullifier( - contract_address: Field, + contract_address: AztecAddress, nonce: Field, storage_slot: Field, preimage: [Field; VALUE_NOTE_LEN] diff --git a/tooling/nargo_fmt/tests/input/contract.nr b/tooling/nargo_fmt/tests/input/contract.nr index 6bc5c552110..2e3f4d7c8c4 100644 --- a/tooling/nargo_fmt/tests/input/contract.nr +++ b/tooling/nargo_fmt/tests/input/contract.nr @@ 
-3,6 +3,8 @@ // Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. // Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum. contract Benchmarking { + use dep::aztec::protocol_types::abis::function_selector::FunctionSelector; + use dep::value_note::{ utils::{increment, decrement}, value_note::{VALUE_NOTE_LEN, ValueNote, ValueNoteMethods}, @@ -11,7 +13,6 @@ contract Benchmarking { use dep::aztec::{ context::{Context}, note::{utils as note_utils, note_getter_options::NoteGetterOptions, note_header::NoteHeader}, - selector::compute_selector, log::emit_unencrypted_log, state_vars::{map::Map, public_state::PublicState, set::Set}, types::type_serialization::field_serialization::{FieldSerializationMethods, FIELD_SERIALIZED_LEN}, @@ -19,8 +20,8 @@ contract Benchmarking { }; struct Storage { - notes: Map>, - balances: Map>, + notes: Map>, + balances: Map>, } impl Storage { @@ -57,7 +58,11 @@ contract Benchmarking { fn increment_balance(owner: Field, value: Field) { let current = storage.balances.at(owner).read(); storage.balances.at(owner).write(current + value); - let _callStackItem1 = context.call_public_function(context.this_address(), compute_selector("broadcast(Field)"), [owner]); + let _callStackItem1 = context.call_public_function( + context.this_address(), + FunctionSelector::from_signature("broadcast(Field)"), + [owner] + ); } // Est ultricies integer quis auctor elit sed. In nibh mauris cursus mattis molestie a iaculis. 
@@ -66,7 +71,12 @@ contract Benchmarking { emit_unencrypted_log(&mut context, storage.balances.at(owner).read()); } - unconstrained fn compute_note_hash_and_nullifier(contract_address: Field, nonce: Field, storage_slot: Field, preimage: [Field; VALUE_NOTE_LEN]) -> [Field; 4] { + unconstrained fn compute_note_hash_and_nullifier( + contract_address: AztecAddress, + nonce: Field, + storage_slot: Field, + preimage: [Field; VALUE_NOTE_LEN] + ) -> [Field; 4] { let note_header = NoteHeader::new(contract_address, nonce, storage_slot); note_utils::compute_note_hash_and_nullifier(ValueNoteMethods, note_header, preimage) } diff --git a/tooling/nargo_toml/src/errors.rs b/tooling/nargo_toml/src/errors.rs index da976e1b185..440895056c3 100644 --- a/tooling/nargo_toml/src/errors.rs +++ b/tooling/nargo_toml/src/errors.rs @@ -69,6 +69,9 @@ pub enum ManifestError { #[error(transparent)] SemverError(SemverError), + + #[error("Cyclic package dependency found when processing {cycle}")] + CyclicDependency { cycle: String }, } #[allow(clippy::enum_variant_names)] diff --git a/tooling/nargo_toml/src/lib.rs b/tooling/nargo_toml/src/lib.rs index 56024f8ed42..cecc3f7e26a 100644 --- a/tooling/nargo_toml/src/lib.rs +++ b/tooling/nargo_toml/src/lib.rs @@ -120,7 +120,11 @@ struct PackageConfig { } impl PackageConfig { - fn resolve_to_package(&self, root_dir: &Path) -> Result { + fn resolve_to_package( + &self, + root_dir: &Path, + processed: &mut Vec, + ) -> Result { let name: CrateName = if let Some(name) = &self.package.name { name.parse().map_err(|_| ManifestError::InvalidPackageName { toml: root_dir.join("Nargo.toml"), @@ -136,7 +140,7 @@ impl PackageConfig { toml: root_dir.join("Nargo.toml"), name: name.into(), })?; - let resolved_dep = dep_config.resolve_to_dependency(root_dir)?; + let resolved_dep = dep_config.resolve_to_dependency(root_dir, processed)?; dependencies.insert(name, resolved_dep); } @@ -270,7 +274,6 @@ struct PackageMetadata { // We also state that ACIR and the compiler will 
upgrade in lockstep. // so you will not need to supply an ACIR and compiler version compiler_version: Option, - backend: Option, license: Option, } @@ -284,7 +287,11 @@ enum DependencyConfig { } impl DependencyConfig { - fn resolve_to_dependency(&self, pkg_root: &Path) -> Result { + fn resolve_to_dependency( + &self, + pkg_root: &Path, + processed: &mut Vec, + ) -> Result { let dep = match self { Self::Github { git, tag, directory } => { let dir_path = clone_git_repo(git, tag).map_err(ManifestError::GitError)?; @@ -301,13 +308,13 @@ impl DependencyConfig { dir_path }; let toml_path = project_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Remote { package } } Self::Path { path } => { let dir_path = pkg_root.join(path); let toml_path = dir_path.join("Nargo.toml"); - let package = resolve_package_from_toml(&toml_path)?; + let package = resolve_package_from_toml(&toml_path, processed)?; Dependency::Local { package } } }; @@ -326,9 +333,10 @@ fn toml_to_workspace( nargo_toml: NargoToml, package_selection: PackageSelection, ) -> Result { + let mut resolved = Vec::new(); let workspace = match nargo_toml.config { Config::Package { package_config } => { - let member = package_config.resolve_to_package(&nargo_toml.root_dir)?; + let member = package_config.resolve_to_package(&nargo_toml.root_dir, &mut resolved)?; match &package_selection { PackageSelection::Selected(selected_name) if selected_name != &member.name => { return Err(ManifestError::MissingSelectedPackage(member.name)) @@ -346,7 +354,7 @@ fn toml_to_workspace( for (index, member_path) in workspace_config.members.into_iter().enumerate() { let package_root_dir = nargo_toml.root_dir.join(&member_path); let package_toml_path = package_root_dir.join("Nargo.toml"); - let member = resolve_package_from_toml(&package_toml_path)?; + let member = resolve_package_from_toml(&package_toml_path, &mut resolved)?; match 
&package_selection { PackageSelection::Selected(selected_name) => { @@ -403,17 +411,43 @@ fn read_toml(toml_path: &Path) -> Result { } /// Resolves a Nargo.toml file into a `Package` struct as defined by our `nargo` core. -fn resolve_package_from_toml(toml_path: &Path) -> Result { +fn resolve_package_from_toml( + toml_path: &Path, + processed: &mut Vec, +) -> Result { + // Checks for cyclic dependencies + let str_path = toml_path.to_str().expect("ICE - path is empty"); + if processed.contains(&str_path.to_string()) { + let mut cycle = false; + let mut message = String::new(); + for toml in processed { + cycle = cycle || toml == str_path; + if cycle { + message += &format!("{} referencing ", toml); + } + } + message += str_path; + return Err(ManifestError::CyclicDependency { cycle: message }); + } + // Adds the package to the set of resolved packages + if let Some(str) = toml_path.to_str() { + processed.push(str.to_string()); + } + let nargo_toml = read_toml(toml_path)?; - match nargo_toml.config { + let result = match nargo_toml.config { Config::Package { package_config } => { - package_config.resolve_to_package(&nargo_toml.root_dir) + package_config.resolve_to_package(&nargo_toml.root_dir, processed) } Config::Workspace { .. 
} => { Err(ManifestError::UnexpectedWorkspace(toml_path.to_path_buf())) } - } + }; + let pos = + processed.iter().position(|toml| toml == str_path).expect("added package must be here"); + processed.remove(pos); + result } #[derive(Debug, PartialEq, Eq)] diff --git a/tooling/noir_codegen/.gitignore b/tooling/noir_codegen/.gitignore index 15ea344d453..29b0e40ffa8 100644 --- a/tooling/noir_codegen/.gitignore +++ b/tooling/noir_codegen/.gitignore @@ -1,5 +1,5 @@ crs lib -!test/*/target test/codegen +test/test_lib/export diff --git a/tooling/noir_codegen/package.json b/tooling/noir_codegen/package.json index 52ad0321b85..60ccf5ec2a5 100644 --- a/tooling/noir_codegen/package.json +++ b/tooling/noir_codegen/package.json @@ -1,16 +1,24 @@ { "name": "@noir-lang/noir_codegen", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.20.0", + "version": "0.23.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "compiler/wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/types": "workspace:*", "glob": "^10.3.10", - "lodash": "^4.17.21", "ts-command-line-args": "^2.5.1" }, "files": [ @@ -27,9 +35,9 @@ "dev": "tsc-multi --watch", "build": "tsc", "test": "yarn test:codegen && yarn test:node && yarn test:clean", - "test:codegen": "tsx src/main.ts ./test/assert_lt/target/** --out-dir ./test/codegen", + "test:codegen": "nargo export --program-dir=./test/test_lib && tsx src/main.ts ./test/test_lib/export/** --out-dir ./test/codegen", "test:node": "mocha --timeout 25000 --exit --config ./.mocharc.json", - "test:clean": "rm -rf ./test/codegen", + "test:clean": "rm -rf ./test/codegen ./test/test_lib/export", "prettier": "prettier 'src/**/*.ts'", "prettier:fix": "prettier --write 'src/**/*.ts' 'test/**/*.ts'", "lint": "NODE_NO_WARNINGS=1 
eslint . --ext .ts --ignore-path ./.eslintignore --max-warnings 0", @@ -40,7 +48,6 @@ "devDependencies": { "@noir-lang/noir_js": "workspace:*", "@types/chai": "^4", - "@types/lodash": "^4", "@types/mocha": "^10.0.1", "@types/node": "^20.6.2", "@types/prettier": "^3", diff --git a/tooling/noir_codegen/src/index.ts b/tooling/noir_codegen/src/index.ts index 8d45b76bd7d..fbbab07bcfe 100644 --- a/tooling/noir_codegen/src/index.ts +++ b/tooling/noir_codegen/src/index.ts @@ -1,5 +1,6 @@ +import { AbiType } from '@noir-lang/noirc_abi'; import { CompiledCircuit } from '@noir-lang/types'; -import { PrimitiveTypesUsed, generateTsInterface } from './noir_types.js'; +import { PrimitiveTypesUsed, generateTsInterface, codegenStructDefinitions } from './noir_types.js'; // TODO: reenable this. See `abiTypeToTs` for reasoning. // export type FixedLengthArray = L extends 0 ? never[]: T[] & { length: L }; @@ -8,7 +9,9 @@ const codegenPrelude = `/* Autogenerated file, do not edit! */ /* eslint-disable */ -import { Noir, InputMap, CompiledCircuit } from "@noir-lang/noir_js" +import { Noir, InputMap, CompiledCircuit, ForeignCallHandler } from "@noir-lang/noir_js" + +export { ForeignCallHandler } from "@noir-lang/noir_js" `; const codegenFunction = ( @@ -19,38 +22,41 @@ const codegenFunction = ( const args = function_signature.inputs.map(([name]) => `${name}`).join(', '); const args_with_types = function_signature.inputs.map(([name, type]) => `${name}: ${type}`).join(', '); - return ` -export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; + return `export const ${name}_circuit: CompiledCircuit = ${JSON.stringify(compiled_program)}; -export async function ${name}(${args_with_types}): Promise<${function_signature.returnValue}> { +export async function ${name}(${args_with_types}, foreignCallHandler?: ForeignCallHandler): Promise<${ + function_signature.returnValue + }> { const program = new Noir(${name}_circuit); const args: InputMap = { ${args} }; - const { 
returnValue } = await program.execute(args); + const { returnValue } = await program.execute(args, foreignCallHandler); return returnValue as ${function_signature.returnValue}; -}`; +} +`; }; export const codegen = (programs: [string, CompiledCircuit][]): string => { let results = [codegenPrelude]; const primitiveTypeMap = new Map(); + const structTypeMap = new Map(); const functions: string[] = []; for (const [name, program] of programs) { - const [types_string, function_sig] = generateTsInterface(program.abi, primitiveTypeMap); - functions.push(types_string); - functions.push('\n'); + const function_sig = generateTsInterface(program.abi, structTypeMap, primitiveTypeMap); functions.push(codegenFunction(name, stripUnwantedFields(program), function_sig)); } + const structTypeDefinitions: string = codegenStructDefinitions(structTypeMap, primitiveTypeMap); + // Add the primitive Noir types that do not have a 1-1 mapping to TypeScript. const primitiveTypeAliases: string[] = []; for (const value of primitiveTypeMap.values()) { primitiveTypeAliases.push(`export type ${value.aliasName} = ${value.tsType};`); } - results = results.concat(...primitiveTypeAliases, ...functions); + results = results.concat(...primitiveTypeAliases, '', structTypeDefinitions, ...functions); - return results.filter((val) => val !== '').join('\n'); + return results.join('\n'); }; // eslint-disable-next-line @typescript-eslint/no-explicit-any diff --git a/tooling/noir_codegen/src/noir_types.ts b/tooling/noir_codegen/src/noir_types.ts index ba4f8650b3b..0c0e2b7c60f 100644 --- a/tooling/noir_codegen/src/noir_types.ts +++ b/tooling/noir_codegen/src/noir_types.ts @@ -112,43 +112,26 @@ function getLastComponentOfPath(str: string): string { */ function generateStructInterfaces( type: AbiType, - output: Set, + structsEncountered: Map, primitiveTypeMap: Map, -): string { - let result = ''; - +) { // Edge case to handle the array of structs case. 
- if (type.kind === 'array' && type.type.kind === 'struct' && !output.has(getLastComponentOfPath(type.type.path))) { - result += generateStructInterfaces(type.type, output, primitiveTypeMap); + if ( + type.kind === 'array' && + type.type.kind === 'struct' && + !structsEncountered.has(getLastComponentOfPath(type.type.path)) + ) { + generateStructInterfaces(type.type, structsEncountered, primitiveTypeMap); } - if (type.kind !== 'struct') return result; - - // List of structs encountered while viewing this type that we need to generate - // bindings for. - const typesEncountered = new Set(); - - // Codegen the struct and then its fields, so that the structs fields - // are defined before the struct itself. - let codeGeneratedStruct = ''; - let codeGeneratedStructFields = ''; + if (type.kind !== 'struct') return; const structName = getLastComponentOfPath(type.path); - if (!output.has(structName)) { - codeGeneratedStruct += `export type ${structName} = {\n`; + if (!structsEncountered.has(structName)) { for (const field of type.fields) { - codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; - typesEncountered.add(field.type); - } - codeGeneratedStruct += `};`; - output.add(structName); - - // Generate code for the encountered structs in the field above - for (const type of typesEncountered) { - codeGeneratedStructFields += generateStructInterfaces(type, output, primitiveTypeMap); + generateStructInterfaces(field.type, structsEncountered, primitiveTypeMap); } + structsEncountered.set(structName, type.fields); } - - return codeGeneratedStructFields + '\n' + codeGeneratedStruct; } /** @@ -158,22 +141,37 @@ function generateStructInterfaces( */ export function generateTsInterface( abiObj: Abi, + structsEncountered: Map, primitiveTypeMap: Map, -): [string, { inputs: [string, string][]; returnValue: string | null }] { - let result = ``; - const outputStructs = new Set(); - +): { inputs: [string, string][]; returnValue: string | null } { // 
Define structs for composite types for (const param of abiObj.parameters) { - result += generateStructInterfaces(param.type, outputStructs, primitiveTypeMap); + generateStructInterfaces(param.type, structsEncountered, primitiveTypeMap); } // Generating Return type, if it exists if (abiObj.return_type != null) { - result += generateStructInterfaces(abiObj.return_type.abi_type, outputStructs, primitiveTypeMap); + generateStructInterfaces(abiObj.return_type.abi_type, structsEncountered, primitiveTypeMap); + } + + return getTsFunctionSignature(abiObj, primitiveTypeMap); +} + +export function codegenStructDefinitions( + structsEncountered: Map, + primitiveTypeMap: Map, +): string { + let codeGeneratedStruct = ''; + + for (const [structName, structFields] of structsEncountered) { + codeGeneratedStruct += `export type ${structName} = {\n`; + for (const field of structFields) { + codeGeneratedStruct += ` ${field.name}: ${abiTypeToTs(field.type, primitiveTypeMap)};\n`; + } + codeGeneratedStruct += `};\n\n`; } - return [result, getTsFunctionSignature(abiObj, primitiveTypeMap)]; + return codeGeneratedStruct; } function getTsFunctionSignature( diff --git a/tooling/noir_codegen/src/utils/glob.ts b/tooling/noir_codegen/src/utils/glob.ts index 15deaf72e44..a1e4c3b1ea1 100644 --- a/tooling/noir_codegen/src/utils/glob.ts +++ b/tooling/noir_codegen/src/utils/glob.ts @@ -1,9 +1,6 @@ import { sync as globSync } from 'glob'; -import _ from 'lodash'; -const { flatten, uniq } = _; export function glob(cwd: string, patternsOrFiles: string[]): string[] { const matches = patternsOrFiles.map((p) => globSync(p, { ignore: 'node_modules/**', absolute: true, cwd })); - - return uniq(flatten(matches)); + return [...new Set(matches.flat())]; } diff --git a/tooling/noir_codegen/test/assert_lt/src/main.nr b/tooling/noir_codegen/test/assert_lt/src/main.nr deleted file mode 100644 index 3b3e04ddece..00000000000 --- a/tooling/noir_codegen/test/assert_lt/src/main.nr +++ /dev/null @@ -1,19 +0,0 @@ 
-struct MyStruct { - foo: bool, - bar: [str<5>; 3], -} - -fn main( - x: u64, - y: pub u64, - array: [u8; 5], - my_struct: MyStruct, - string: str<5> -) -> pub (u64, u64, MyStruct) { - assert(array.len() == 5); - assert(my_struct.foo); - assert(string == "12345"); - - assert(x < y); - (x + y, 3, my_struct) -} diff --git a/tooling/noir_codegen/test/assert_lt/target/assert_lt.json b/tooling/noir_codegen/test/assert_lt/target/assert_lt.json deleted file mode 100644 index a1ab87a99fe..00000000000 --- a/tooling/noir_codegen/test/assert_lt/target/assert_lt.json +++ /dev/null @@ -1 +0,0 @@ -{"noir_version":"0.19.4+55670ff82c270534a4bdb999ab0de5cea7017093","hash":11505576107297330043,"backend":"acvm-backend-barretenberg","abi":{"parameters":[{"name":"x","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"private"},{"name":"y","type":{"kind":"integer","sign":"unsigned","width":64},"visibility":"public"},{"name":"array","type":{"kind":"array","length":5,"type":{"kind":"integer","sign":"unsigned","width":8}},"visibility":"private"},{"name":"my_struct","type":{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]},"visibility":"private"},{"name":"string","type":{"kind":"string","length":5},"visibility":"private"}],"param_witnesses":{"array":[{"start":3,"end":8}],"my_struct":[{"start":8,"end":24}],"string":[{"start":24,"end":29}],"x":[{"start":1,"end":2}],"y":[{"start":2,"end":3}]},"return_type":{"abi_type":{"kind":"tuple","fields":[{"kind":"integer","sign":"unsigned","width":64},{"kind":"integer","sign":"unsigned","width":64},{"kind":"struct","path":"MyStruct","fields":[{"name":"foo","type":{"kind":"boolean"}},{"name":"bar","type":{"kind":"array","length":3,"type":{"kind":"string","length":5}}}]}]},"visibility":"public"},"return_witnesses":[31,32,33,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]},"bytecode":"H4sIAAAAAAAA/81XbU/CMBDu5hv4gopvvGw49JOJ
H1q2wfaN+E+AddFEgzGL/H250Go5dInumnhJ0z2jXJ9er7s+t4yxe7YyZ9lc1Y8N7CK8tWw1A28jvIPwLsJ7Cus5mfIPxquZqBlzmX5DPowiORpIEYoJH6TTJOZRPB0mIhFxEmeDJAxlEiWjdJqOeCqiUIo8TsNcOa7RceQ6DnUUl32EDxA+RPgI4QbCxwifIHyKcBPhM4TPEb5A+BLhK4RbCLcR7iDcRdhjX3mjzUb+jIlyxibPFgFPmYNlVnm2yXjOcps8O3Q8pU2eXTqemU2eHh3PGdQbl22aS8zZYXRn3/07L4FffLN0Mt9mXH3V99iqhuu80GOgzj+wzZxxjGdXjXFLxjg/+Kkb7/T/G8bvVRe/EQxzciqfvgok9QXEp+P4eQHpGT61bRHHw9ahqurrhjCeZfH7JU+OeAqfcM09wn2tEL/SD9x/Pjdl+8yr2do54dVMUJ6Ta0b/3TF92tr3gI53aJNnn3DfuwZHyE8o2FDIQYBr0Q1FFoQmiEsQlCAiociCWASBCKIQhCCIPxB8IPJA2IGYA9EBF3q4LMNcHlsv/E31XGUOyI1g2fpsvfDfqd5T/aQo5MtrERTzYJJlweKpeAzm7/Itf54vPgBYg2KL1RAAAA=="} \ No newline at end of file diff --git a/tooling/noir_codegen/test/index.test.ts b/tooling/noir_codegen/test/index.test.ts index 48199c13a67..03fb680a537 100644 --- a/tooling/noir_codegen/test/index.test.ts +++ b/tooling/noir_codegen/test/index.test.ts @@ -1,16 +1,106 @@ import { expect } from 'chai'; -import { assert_lt, MyStruct, u64 } from './codegen/index.js'; +// eslint-disable-next-line @typescript-eslint/ban-ts-comment +// @ts-ignore File is codegenned at test time. 
+import { exported_function_foo, MyStruct, u64, ForeignCallHandler } from './codegen/index.js'; it('codegens a callable function', async () => { - const [sum, constant, struct]: [u64, u64, MyStruct] = await assert_lt( + const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' }; + + const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo( '2', '3', [0, 0, 0, 0, 0], - { foo: true, bar: ['12345', '12345', '12345'] }, + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, '12345', ); expect(sum).to.be.eq('0x05'); expect(constant).to.be.eq('0x03'); - expect(struct).to.be.deep.eq({ foo: true, bar: ['12345', '12345', '12345'] }); + expect(struct).to.be.deep.eq(my_struct); +}); + +it('allows passing a custom foreign call handler', async () => { + let observedName = ''; + let observedInputs: string[][] = []; + const foreignCallHandler: ForeignCallHandler = async (name: string, inputs: string[][]) => { + // Throwing inside the oracle callback causes a timeout so we log the observed values + // and defer the check against expected values until after the execution is complete. + observedName = name; + observedInputs = inputs; + + return []; + }; + + const my_struct = { foo: true, bar: ['12345', '12345', '12345'], baz: '0x00' }; + + const [sum, constant, struct]: [u64, u64, MyStruct] = await exported_function_foo( + '2', + '3', + [0, 0, 0, 0, 0], + { + foo: my_struct, + bar: [my_struct, my_struct, my_struct], + baz: '64', + }, + '12345', + foreignCallHandler, + ); + + expect(observedName).to.be.eq('print'); + expect(observedInputs).to.be.deep.eq([ + // add newline? 
+ ['0x0000000000000000000000000000000000000000000000000000000000000000'], + // x + ['0x0000000000000000000000000000000000000000000000000000000000000002'], + // Type metadata + [ + '0x000000000000000000000000000000000000000000000000000000000000007b', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000006b', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000075', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000073', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x000000000000000000000000000000000000000000000000000000000000006e', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000067', + '0x0000000000000000000000000000000000000000000000000000000000000065', + '0x0000000000000000000000000000000000000000000000000000000000000072', + '0x0000000000000000000000000000000000000000000000000000000000000022', + 
'0x000000000000000000000000000000000000000000000000000000000000002c', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x0000000000000000000000000000000000000000000000000000000000000077', + '0x0000000000000000000000000000000000000000000000000000000000000069', + '0x0000000000000000000000000000000000000000000000000000000000000064', + '0x0000000000000000000000000000000000000000000000000000000000000074', + '0x0000000000000000000000000000000000000000000000000000000000000068', + '0x0000000000000000000000000000000000000000000000000000000000000022', + '0x000000000000000000000000000000000000000000000000000000000000003a', + '0x0000000000000000000000000000000000000000000000000000000000000036', + '0x0000000000000000000000000000000000000000000000000000000000000034', + '0x000000000000000000000000000000000000000000000000000000000000007d', + ], + // format string? + ['0x0000000000000000000000000000000000000000000000000000000000000000'], + ]); + + expect(sum).to.be.eq('0x05'); + expect(constant).to.be.eq('0x03'); + expect(struct).to.be.deep.eq(my_struct); }); diff --git a/tooling/noir_codegen/test/test_lib/Nargo.toml b/tooling/noir_codegen/test/test_lib/Nargo.toml new file mode 100644 index 00000000000..74b6167b614 --- /dev/null +++ b/tooling/noir_codegen/test/test_lib/Nargo.toml @@ -0,0 +1,5 @@ +[package] +name = "test_lib" +type = "lib" +authors = [""] +[dependencies] diff --git a/tooling/noir_codegen/test/test_lib/src/lib.nr b/tooling/noir_codegen/test/test_lib/src/lib.nr new file mode 100644 index 00000000000..23607c6f65f --- /dev/null +++ b/tooling/noir_codegen/test/test_lib/src/lib.nr @@ -0,0 +1,27 @@ +struct MyStruct { + foo: bool, + bar: [str<5>; 3], + baz: Field +} + +struct NestedStruct { + foo: MyStruct, + bar: [MyStruct; 3], + baz: u64 +} + +#[export] +fn exported_function_foo(x: u64, y: u64, array: [u8; 5], my_struct: NestedStruct, string: str<5>) -> (u64, u64, MyStruct) { + assert(array.len() == 5); + assert(my_struct.foo.foo); + 
assert(string == "12345"); + + print(x); + assert(x < y); + (x + y, 3, my_struct.foo) +} + +#[export] +fn exported_function_bar(my_struct: NestedStruct) -> (u64) { + my_struct.baz +} diff --git a/tooling/noir_js/package.json b/tooling/noir_js/package.json index e42cbb1d162..356909a1e35 100644 --- a/tooling/noir_js/package.json +++ b/tooling/noir_js/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/noir_js", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.20.0", + "version": "0.23.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "dependencies": { "@noir-lang/acvm_js": "workspace:*", "@noir-lang/noirc_abi": "workspace:*", diff --git a/tooling/noir_js/src/witness_generation.ts b/tooling/noir_js/src/witness_generation.ts index a329c79c919..1f233422061 100644 --- a/tooling/noir_js/src/witness_generation.ts +++ b/tooling/noir_js/src/witness_generation.ts @@ -1,8 +1,24 @@ import { abiEncode, InputMap } from '@noir-lang/noirc_abi'; import { base64Decode } from './base64_decode.js'; -import { executeCircuit, WitnessMap, ForeignCallHandler, ForeignCallInput } from '@noir-lang/acvm_js'; +import { + WitnessMap, + ForeignCallHandler, + ForeignCallInput, + createBlackBoxSolver, + WasmBlackBoxFunctionSolver, + executeCircuitWithBlackBoxSolver, +} from '@noir-lang/acvm_js'; import { CompiledCircuit } from '@noir-lang/types'; +let solver: Promise; + +const getSolver = (): Promise => { + if (!solver) { + solver = createBlackBoxSolver(); + } + return solver; +}; + const defaultForeignCallHandler: ForeignCallHandler = async (name: string, args: ForeignCallInput[]) => { if (name == 'print') { // By default we do not print anything for `print` foreign calls due to a need for formatting, @@ 
-26,7 +42,12 @@ export async function generateWitness( // Execute the circuit to generate the rest of the witnesses and serialize // them into a Uint8Array. try { - const solvedWitness = await executeCircuit(base64Decode(compiledProgram.bytecode), witnessMap, foreignCallHandler); + const solvedWitness = await executeCircuitWithBlackBoxSolver( + await getSolver(), + base64Decode(compiledProgram.bytecode), + witnessMap, + foreignCallHandler, + ); return solvedWitness; } catch (err) { throw new Error(`Circuit execution failed: ${err}`); diff --git a/tooling/noir_js_backend_barretenberg/package.json b/tooling/noir_js_backend_barretenberg/package.json index 623a290410c..cd2a6354ac4 100644 --- a/tooling/noir_js_backend_barretenberg/package.json +++ b/tooling/noir_js_backend_barretenberg/package.json @@ -1,12 +1,21 @@ { "name": "@noir-lang/backend_barretenberg", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.20.0", + "version": "0.23.0", "packageManager": "yarn@3.5.1", "license": "(MIT OR Apache-2.0)", "type": "module", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_backend_barretenberg", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "source": "src/index.ts", "main": "lib/cjs/index.js", "module": "lib/esm/index.js", @@ -33,7 +42,7 @@ "lint": "NODE_NO_WARNINGS=1 eslint . 
--ext .ts --ignore-path ./.eslintignore --max-warnings 0" }, "dependencies": { - "@aztec/bb.js": "0.16.0", + "@aztec/bb.js": "0.19.0", "@noir-lang/types": "workspace:*", "fflate": "^0.8.0" }, diff --git a/tooling/noir_js_backend_barretenberg/src/index.ts b/tooling/noir_js_backend_barretenberg/src/index.ts index 100418debd0..61094a3451f 100644 --- a/tooling/noir_js_backend_barretenberg/src/index.ts +++ b/tooling/noir_js_backend_barretenberg/src/index.ts @@ -1,11 +1,11 @@ -/* eslint-disable @typescript-eslint/no-explicit-any */ import { decompressSync as gunzip } from 'fflate'; import { acirToUint8Array } from './serialize.js'; import { Backend, CompiledCircuit, ProofData } from '@noir-lang/types'; import { BackendOptions } from './types.js'; import { deflattenPublicInputs, flattenPublicInputsAsArray } from './public_inputs.js'; +import { type Barretenberg } from '@aztec/bb.js'; -export { flattenPublicInputs } from './public_inputs.js'; +export { publicInputsToWitnessMap } from './public_inputs.js'; // This is the number of bytes in a UltraPlonk proof // minus the public inputs. @@ -16,12 +16,14 @@ export class BarretenbergBackend implements Backend { // have to initialize `api` and `acirComposer` in the constructor. // These are initialized asynchronously in the `init` function, // constructors cannot be asynchronous which is why we do this. 
- private api: any; + + private api!: Barretenberg; + // eslint-disable-next-line @typescript-eslint/no-explicit-any private acirComposer: any; private acirUncompressedBytecode: Uint8Array; constructor( - private acirCircuit: CompiledCircuit, + acirCircuit: CompiledCircuit, private options: BackendOptions = { threads: 1 }, ) { const acirBytecodeBase64 = acirCircuit.bytecode; @@ -31,10 +33,8 @@ export class BarretenbergBackend implements Backend { /** @ignore */ async instantiate(): Promise { if (!this.api) { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - //@ts-ignore const { Barretenberg, RawBuffer, Crs } = await import('@aztec/bb.js'); - const api = await Barretenberg.new(this.options.threads); + const api = await Barretenberg.new({ threads: this.options.threads }); const [_exact, _total, subgroupSize] = await api.acirGetCircuitSizes(this.acirUncompressedBytecode); const crs = await Crs.new(subgroupSize + 1); @@ -47,29 +47,25 @@ export class BarretenbergBackend implements Backend { } } - // Generate an outer proof. This is the proof for the circuit which will verify - // inner proofs and or can be seen as the proof created for regular circuits. - // - // The settings for this proof are the same as the settings for a "normal" proof - // ie one that is not in the recursive setting. + /** + * Generate a final proof. This is the proof for the circuit which will verify + * intermediate proofs and or can be seen as the proof created for regular circuits. + */ async generateFinalProof(decompressedWitness: Uint8Array): Promise { + // The settings for this proof are the same as the settings for a "normal" proof + // i.e. one that is not in the recursive setting. const makeEasyToVerifyInCircuit = false; return this.generateProof(decompressedWitness, makeEasyToVerifyInCircuit); } - // Generates an inner proof. This is the proof that will be verified - // in another circuit. - // - // This is sometimes referred to as a recursive proof. 
- // We avoid this terminology as the only property of this proof - // that matters, is the fact that it is easy to verify in another - // circuit. We _could_ choose to verify this proof in the CLI. - // - // We set `makeEasyToVerifyInCircuit` to true, which will tell the backend to - // generate the proof using components that will make the proof - // easier to verify in a circuit. - /** + * Generates an intermediate proof. This is the proof that can be verified + * in another circuit. + * + * This is sometimes referred to as a recursive proof. + * We avoid this terminology as the only property of this proof + * that matters is the fact that it is easy to verify in another circuit. + * We _could_ choose to verify this proof outside of a circuit just as easily. * * @example * ```typescript @@ -77,6 +73,9 @@ export class BarretenbergBackend implements Backend { * ``` */ async generateIntermediateProof(witness: Uint8Array): Promise { + // We set `makeEasyToVerifyInCircuit` to true, which will tell the backend to + // generate the proof using components that will make the proof + // easier to verify in a circuit. const makeEasyToVerifyInCircuit = true; return this.generateProof(witness, makeEasyToVerifyInCircuit); } @@ -95,22 +94,21 @@ export class BarretenbergBackend implements Backend { const publicInputsConcatenated = proofWithPublicInputs.slice(0, splitIndex); const proof = proofWithPublicInputs.slice(splitIndex); - const publicInputs = deflattenPublicInputs(publicInputsConcatenated, this.acirCircuit.abi); + const publicInputs = deflattenPublicInputs(publicInputsConcatenated); return { proof, publicInputs }; } - // Generates artifacts that will be passed to a circuit that will verify this proof. - // - // Instead of passing the proof and verification key as a byte array, we pass them - // as fields which makes it cheaper to verify in a circuit. - // - // The proof that is passed here will have been created using the `generateInnerProof` - // method. 
- // - // The number of public inputs denotes how many public inputs are in the inner proof. - /** + * Generates artifacts that will be passed to a circuit that will verify this proof. + * + * Instead of passing the proof and verification key as a byte array, we pass them + * as fields which makes it cheaper to verify in a circuit. + * + * The proof that is passed here will have been created using the `generateIntermediateProof` + * method. + * + * The number of public inputs denotes how many public inputs are in the inner proof. * * @example * ```typescript @@ -127,7 +125,9 @@ export class BarretenbergBackend implements Backend { }> { await this.instantiate(); const proof = reconstructProofWithPublicInputs(proofData); - const proofAsFields = await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs); + const proofAsFields = ( + await this.api.acirSerializeProofIntoFields(this.acirComposer, proof, numOfPublicInputs) + ).slice(numOfPublicInputs); // TODO: perhaps we should put this in the init function. Need to benchmark // TODO how long it takes. 
diff --git a/tooling/noir_js_backend_barretenberg/src/public_inputs.ts b/tooling/noir_js_backend_barretenberg/src/public_inputs.ts index 37bc5b13012..75ee0de6800 100644 --- a/tooling/noir_js_backend_barretenberg/src/public_inputs.ts +++ b/tooling/noir_js_backend_barretenberg/src/public_inputs.ts @@ -1,18 +1,11 @@ import { Abi, WitnessMap } from '@noir-lang/types'; -export function flattenPublicInputs(publicInputs: WitnessMap): string[] { - const publicInputIndices = [...publicInputs.keys()].sort((a, b) => a - b); - const flattenedPublicInputs = publicInputIndices.map((index) => publicInputs.get(index) as string); - return flattenedPublicInputs; -} - -export function flattenPublicInputsAsArray(publicInputs: WitnessMap): Uint8Array { - const flatPublicInputs = flattenPublicInputs(publicInputs); - const flattenedPublicInputs = flatPublicInputs.map(hexToUint8Array); +export function flattenPublicInputsAsArray(publicInputs: string[]): Uint8Array { + const flattenedPublicInputs = publicInputs.map(hexToUint8Array); return flattenUint8Arrays(flattenedPublicInputs); } -export function deflattenPublicInputs(flattenedPublicInputs: Uint8Array, abi: Abi): WitnessMap { +export function deflattenPublicInputs(flattenedPublicInputs: Uint8Array): string[] { const publicInputSize = 32; const chunkedFlattenedPublicInputs: Uint8Array[] = []; @@ -21,6 +14,16 @@ export function deflattenPublicInputs(flattenedPublicInputs: Uint8Array, abi: Ab chunkedFlattenedPublicInputs.push(publicInput); } + return chunkedFlattenedPublicInputs.map(uint8ArrayToHex); +} + +export function witnessMapToPublicInputs(publicInputs: WitnessMap): string[] { + const publicInputIndices = [...publicInputs.keys()].sort((a, b) => a - b); + const flattenedPublicInputs = publicInputIndices.map((index) => publicInputs.get(index) as string); + return flattenedPublicInputs; +} + +export function publicInputsToWitnessMap(publicInputs: string[], abi: Abi): WitnessMap { const return_value_witnesses = abi.return_witnesses; 
const public_parameters = abi.parameters.filter((param) => param.visibility === 'public'); const public_parameter_witnesses: number[] = public_parameters.flatMap((param) => @@ -35,13 +38,13 @@ export function deflattenPublicInputs(flattenedPublicInputs: Uint8Array, abi: Ab (a, b) => a - b, ); - const publicInputs: WitnessMap = new Map(); + const witnessMap: WitnessMap = new Map(); public_input_witnesses.forEach((witness_index, index) => { - const witness_value = uint8ArrayToHex(chunkedFlattenedPublicInputs[index]); - publicInputs.set(witness_index, witness_value); + const witness_value = publicInputs[index]; + witnessMap.set(witness_index, witness_value); }); - return publicInputs; + return witnessMap; } function flattenUint8Arrays(arrays: Uint8Array[]): Uint8Array { diff --git a/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts b/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts index dab1c56436a..079a1ad268b 100644 --- a/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts +++ b/tooling/noir_js_backend_barretenberg/test/public_input_deflattening.test.ts @@ -1,6 +1,6 @@ import { Abi } from '@noir-lang/types'; import { expect } from 'chai'; -import { flattenPublicInputsAsArray, deflattenPublicInputs, flattenPublicInputs } from '../src/public_inputs.js'; +import { witnessMapToPublicInputs, publicInputsToWitnessMap } from '../src/public_inputs.js'; const abi: Abi = { parameters: [ @@ -69,7 +69,7 @@ it('flattens a witness map in order of its witness indices', async () => { ]), ); - const flattened_public_inputs = flattenPublicInputs(witness_map); + const flattened_public_inputs = witnessMapToPublicInputs(witness_map); expect(flattened_public_inputs).to.be.deep.eq([ '0x0000000000000000000000000000000000000000000000000000000000000002', '0x000000000000000000000000000000000000000000000000000000000000000b', @@ -89,8 +89,8 @@ it('recovers the original witness map when deflattening a public input 
array', a ]), ); - const flattened_public_inputs = flattenPublicInputsAsArray(witness_map); - const deflattened_public_inputs = deflattenPublicInputs(flattened_public_inputs, abi); + const flattened_public_inputs = witnessMapToPublicInputs(witness_map); + const deflattened_public_inputs = publicInputsToWitnessMap(flattened_public_inputs, abi); expect(deflattened_public_inputs).to.be.deep.eq(witness_map); }); diff --git a/tooling/noir_js_types/package.json b/tooling/noir_js_types/package.json index b322b9b6207..ef75f3d2fb3 100644 --- a/tooling/noir_js_types/package.json +++ b/tooling/noir_js_types/package.json @@ -1,11 +1,20 @@ { "name": "@noir-lang/types", - "collaborators": [ + "contributors": [ "The Noir Team " ], "packageManager": "yarn@3.5.1", - "version": "0.20.0", + "version": "0.23.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noir_js_types", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "lib", "package.json" diff --git a/tooling/noir_js_types/src/types.ts b/tooling/noir_js_types/src/types.ts index b997d92425d..ee4921bd606 100644 --- a/tooling/noir_js_types/src/types.ts +++ b/tooling/noir_js_types/src/types.ts @@ -1,4 +1,4 @@ -import { Abi, WitnessMap } from '@noir-lang/noirc_abi'; +import { Abi } from '@noir-lang/noirc_abi'; export { Abi, WitnessMap } from '@noir-lang/noirc_abi'; @@ -45,7 +45,7 @@ export interface Backend { * */ export type ProofData = { /** @description Public inputs of a proof */ - publicInputs: WitnessMap; + publicInputs: string[]; /** @description An byte array representing the proof */ proof: Uint8Array; }; diff --git a/tooling/noirc_abi/src/input_parser/mod.rs b/tooling/noirc_abi/src/input_parser/mod.rs index 26c5a89c83a..f66e069d487 100644 --- a/tooling/noirc_abi/src/input_parser/mod.rs +++ b/tooling/noirc_abi/src/input_parser/mod.rs @@ -215,7 +215,7 @@ fn 
parse_str_to_field(value: &str) -> Result { }) } -fn parse_str_to_signed(value: &str, witdh: u32) -> Result { +fn parse_str_to_signed(value: &str, width: u32) -> Result { let big_num = if let Some(hex) = value.strip_prefix("0x") { BigInt::from_str_radix(hex, 16) } else { @@ -225,7 +225,7 @@ fn parse_str_to_signed(value: &str, witdh: u32) -> Result unreachable!("format strings cannot be used in the abi"), - Type::Error => unreachable!(), - Type::Unit => unreachable!(), - Type::Constant(_) => unreachable!(), - Type::TraitAsType(..) => unreachable!(), + Type::Struct(def, ref args) => { let struct_type = def.borrow(); let fields = struct_type.get_fields(args); @@ -175,12 +171,17 @@ impl AbiType { let fields = vecmap(fields, |typ| Self::from_type(context, typ)); Self::Tuple { fields } } - Type::TypeVariable(_, _) => unreachable!(), - Type::NamedGeneric(..) => unreachable!(), - Type::Forall(..) => unreachable!(), - Type::Function(_, _, _) => unreachable!(), + Type::Error + | Type::Unit + | Type::Constant(_) + | Type::TraitAsType(..) + | Type::TypeVariable(_, _) + | Type::NamedGeneric(..) + | Type::Forall(..) 
+ | Type::NotConstant + | Type::Function(_, _, _) => unreachable!("Type cannot be used in the abi"), + Type::FmtString(_, _) => unreachable!("format strings cannot be used in the abi"), Type::MutableReference(_) => unreachable!("&mut cannot be used in the abi"), - Type::NotConstant => unreachable!(), } } diff --git a/tooling/noirc_abi_wasm/package.json b/tooling/noirc_abi_wasm/package.json index fe801a40d5e..db0f6c29153 100644 --- a/tooling/noirc_abi_wasm/package.json +++ b/tooling/noirc_abi_wasm/package.json @@ -1,10 +1,19 @@ { "name": "@noir-lang/noirc_abi", - "collaborators": [ + "contributors": [ "The Noir Team " ], - "version": "0.20.0", + "version": "0.23.0", "license": "(MIT OR Apache-2.0)", + "homepage": "https://noir-lang.org/", + "repository": { + "url": "https://github.com/noir-lang/noir.git", + "directory": "tooling/noirc_abi_wasm", + "type": "git" + }, + "bugs": { + "url": "https://github.com/noir-lang/noir/issues" + }, "files": [ "nodejs", "web", @@ -17,10 +26,6 @@ "types": "./web/noirc_abi_wasm.d.ts", "module": "./web/noirc_abi_wasm.js", "sideEffects": false, - "repository": { - "type": "git", - "url": "https://github.com/noir-lang/noir.git" - }, "scripts": { "build": "bash ./build.sh", "test": "env TS_NODE_COMPILER_OPTIONS='{\"module\": \"commonjs\" }' mocha", diff --git a/tooling/noirc_abi_wasm/src/lib.rs b/tooling/noirc_abi_wasm/src/lib.rs index fb4c295b8c8..5557cc917bf 100644 --- a/tooling/noirc_abi_wasm/src/lib.rs +++ b/tooling/noirc_abi_wasm/src/lib.rs @@ -27,7 +27,7 @@ use js_witness_map::JsWitnessMap; #[wasm_bindgen(typescript_custom_section)] const INPUT_MAP: &'static str = r#" export type Field = string | number | boolean; -export type InputValue = Field | Field[] | InputMap; +export type InputValue = Field | InputMap | (Field | InputMap)[]; export type InputMap = { [key: string]: InputValue }; "#; diff --git a/tooling/noirc_abi_wasm/test/browser/structs.test.ts b/tooling/noirc_abi_wasm/test/browser/structs.test.ts new file mode 100644 
index 00000000000..84352d986ab --- /dev/null +++ b/tooling/noirc_abi_wasm/test/browser/structs.test.ts @@ -0,0 +1,26 @@ +import { expect } from '@esm-bundle/chai'; +import initNoirAbi, { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import { MyNestedStruct, MyStruct } from '../shared/structs'; +import { DecodedInputs } from '../types'; + +beforeEach(async () => { + await initNoirAbi(); +}); + +it('correctly handles struct inputs', async () => { + const { abi, inputs } = await import('../shared/structs'); + + const initial_witness: WitnessMap = abiEncode(abi, inputs); + const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); + + const struct_arg: MyStruct = inputs.struct_arg as MyStruct; + const struct_array_arg: MyStruct[] = inputs.struct_array_arg as MyStruct[]; + const nested_struct_arg: MyNestedStruct = inputs.nested_struct_arg as MyNestedStruct; + + expect(BigInt(decoded_inputs.inputs.struct_arg.foo)).to.be.equal(BigInt(struct_arg.foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[0].foo)).to.be.equal(BigInt(struct_array_arg[0].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[1].foo)).to.be.equal(BigInt(struct_array_arg[1].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[2].foo)).to.be.equal(BigInt(struct_array_arg[2].foo)); + expect(BigInt(decoded_inputs.inputs.nested_struct_arg.foo.foo)).to.be.equal(BigInt(nested_struct_arg.foo.foo)); + expect(decoded_inputs.return_value).to.be.null; +}); diff --git a/tooling/noirc_abi_wasm/test/node/structs.test.ts b/tooling/noirc_abi_wasm/test/node/structs.test.ts new file mode 100644 index 00000000000..a7d104b46d3 --- /dev/null +++ b/tooling/noirc_abi_wasm/test/node/structs.test.ts @@ -0,0 +1,22 @@ +import { expect } from 'chai'; +import { abiEncode, abiDecode, WitnessMap } from '@noir-lang/noirc_abi'; +import { MyNestedStruct, MyStruct } from '../shared/structs'; +import { DecodedInputs } from '../types'; + +it('correctly handles struct inputs', async 
() => { + const { abi, inputs } = await import('../shared/structs'); + + const initial_witness: WitnessMap = abiEncode(abi, inputs); + const decoded_inputs: DecodedInputs = abiDecode(abi, initial_witness); + + const struct_arg: MyStruct = inputs.struct_arg as MyStruct; + const struct_array_arg: MyStruct[] = inputs.struct_array_arg as MyStruct[]; + const nested_struct_arg: MyNestedStruct = inputs.nested_struct_arg as MyNestedStruct; + + expect(BigInt(decoded_inputs.inputs.struct_arg.foo)).to.be.equal(BigInt(struct_arg.foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[0].foo)).to.be.equal(BigInt(struct_array_arg[0].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[1].foo)).to.be.equal(BigInt(struct_array_arg[1].foo)); + expect(BigInt(decoded_inputs.inputs.struct_array_arg[2].foo)).to.be.equal(BigInt(struct_array_arg[2].foo)); + expect(BigInt(decoded_inputs.inputs.nested_struct_arg.foo.foo)).to.be.equal(BigInt(nested_struct_arg.foo.foo)); + expect(decoded_inputs.return_value).to.be.null; +}); diff --git a/tooling/noirc_abi_wasm/test/shared/structs.ts b/tooling/noirc_abi_wasm/test/shared/structs.ts new file mode 100644 index 00000000000..6614f8f278e --- /dev/null +++ b/tooling/noirc_abi_wasm/test/shared/structs.ts @@ -0,0 +1,79 @@ +import { Abi, Field, InputMap } from '@noir-lang/noirc_abi'; + +export type MyStruct = { + foo: Field; +}; + +export type MyNestedStruct = { + foo: MyStruct; +}; + +export const abi: Abi = { + parameters: [ + { + name: 'struct_arg', + type: { kind: 'struct', path: 'MyStruct', fields: [{ name: 'foo', type: { kind: 'field' } }] }, + visibility: 'private', + }, + { + name: 'struct_array_arg', + type: { + kind: 'array', + type: { + kind: 'struct', + path: 'MyStruct', + fields: [{ name: 'foo', type: { kind: 'field' } }], + }, + length: 3, + }, + visibility: 'private', + }, + { + name: 'nested_struct_arg', + type: { + kind: 'struct', + path: 'MyNestedStruct', + fields: [ + { + name: 'foo', + type: { + kind: 'struct', + 
path: 'MyStruct', + fields: [{ name: 'foo', type: { kind: 'field' } }], + }, + }, + ], + }, + visibility: 'private', + }, + ], + param_witnesses: { + struct_arg: [{ start: 1, end: 2 }], + struct_array_arg: [{ start: 2, end: 5 }], + nested_struct_arg: [{ start: 5, end: 6 }], + }, + return_type: null, + return_witnesses: [], +}; + +export const inputs: InputMap = { + struct_arg: { + foo: '1', + }, + struct_array_arg: [ + { + foo: '2', + }, + { + foo: '3', + }, + { + foo: '4', + }, + ], + nested_struct_arg: { + foo: { + foo: '5', + }, + }, +}; diff --git a/versions.json b/versions.json deleted file mode 100644 index a1c826264f4..00000000000 --- a/versions.json +++ /dev/null @@ -1,4 +0,0 @@ -[ - "v0.19.4", - "v0.17.0" -] \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index fab2866a228..db3f493bc62 100644 --- a/yarn.lock +++ b/yarn.lock @@ -235,6 +235,20 @@ __metadata: languageName: node linkType: hard +"@aztec/bb.js@npm:0.19.0": + version: 0.19.0 + resolution: "@aztec/bb.js@npm:0.19.0" + dependencies: + comlink: ^4.4.1 + commander: ^10.0.1 + debug: ^4.3.4 + tslib: ^2.4.0 + bin: + bb.js: dest/node/main.js + checksum: c78c22c3b8c43e0010a43145f973489aa7da9fcf0b8527884107f1f34ac3ca5f5d4ab087d74ce50cb75d6dbaef88bfc693e23745282faa30b81dc78481c65874 + languageName: node + linkType: hard + "@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.10.4, @babel/code-frame@npm:^7.12.11, @babel/code-frame@npm:^7.16.0, @babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5, @babel/code-frame@npm:^7.8.3": version: 7.23.5 resolution: "@babel/code-frame@npm:7.23.5" @@ -1800,6 +1814,462 @@ __metadata: languageName: node linkType: hard +"@cspell/cspell-bundled-dicts@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-bundled-dicts@npm:8.3.2" + dependencies: + "@cspell/dict-ada": ^4.0.2 + "@cspell/dict-aws": ^4.0.1 + "@cspell/dict-bash": ^4.1.3 + "@cspell/dict-companies": ^3.0.29 + "@cspell/dict-cpp": ^5.0.10 + "@cspell/dict-cryptocurrencies": ^5.0.0 + 
"@cspell/dict-csharp": ^4.0.2 + "@cspell/dict-css": ^4.0.12 + "@cspell/dict-dart": ^2.0.3 + "@cspell/dict-django": ^4.1.0 + "@cspell/dict-docker": ^1.1.7 + "@cspell/dict-dotnet": ^5.0.0 + "@cspell/dict-elixir": ^4.0.3 + "@cspell/dict-en-common-misspellings": ^2.0.0 + "@cspell/dict-en-gb": 1.1.33 + "@cspell/dict-en_us": ^4.3.13 + "@cspell/dict-filetypes": ^3.0.3 + "@cspell/dict-fonts": ^4.0.0 + "@cspell/dict-fsharp": ^1.0.1 + "@cspell/dict-fullstack": ^3.1.5 + "@cspell/dict-gaming-terms": ^1.0.4 + "@cspell/dict-git": ^3.0.0 + "@cspell/dict-golang": ^6.0.5 + "@cspell/dict-haskell": ^4.0.1 + "@cspell/dict-html": ^4.0.5 + "@cspell/dict-html-symbol-entities": ^4.0.0 + "@cspell/dict-java": ^5.0.6 + "@cspell/dict-k8s": ^1.0.2 + "@cspell/dict-latex": ^4.0.0 + "@cspell/dict-lorem-ipsum": ^4.0.0 + "@cspell/dict-lua": ^4.0.3 + "@cspell/dict-makefile": ^1.0.0 + "@cspell/dict-node": ^4.0.3 + "@cspell/dict-npm": ^5.0.14 + "@cspell/dict-php": ^4.0.5 + "@cspell/dict-powershell": ^5.0.3 + "@cspell/dict-public-licenses": ^2.0.5 + "@cspell/dict-python": ^4.1.11 + "@cspell/dict-r": ^2.0.1 + "@cspell/dict-ruby": ^5.0.2 + "@cspell/dict-rust": ^4.0.1 + "@cspell/dict-scala": ^5.0.0 + "@cspell/dict-software-terms": ^3.3.15 + "@cspell/dict-sql": ^2.1.3 + "@cspell/dict-svelte": ^1.0.2 + "@cspell/dict-swift": ^2.0.1 + "@cspell/dict-typescript": ^3.1.2 + "@cspell/dict-vue": ^3.0.0 + checksum: 2b50d8a3d056a6e261e4d525fe8f70b8d4e9860c305f6ba21ea7869807b2b8100b30d6f4ff4da91ea0ac6fcea7191158c7091e839b3b41ee3052f3e3b401b347 + languageName: node + linkType: hard + +"@cspell/cspell-json-reporter@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-json-reporter@npm:8.3.2" + dependencies: + "@cspell/cspell-types": 8.3.2 + checksum: 25926f8cfef378dbce59f140d9f383db4598222a3de5f2a0a6840e225d694afce2c069498668304f4ec39e58f5570887ef0d31f45824ca16b0fc31f20180352e + languageName: node + linkType: hard + +"@cspell/cspell-pipe@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-pipe@npm:8.3.2" + 
checksum: 14b01d2cdc6af931a0b5ac9fcd5e5c358fcdd7710ed82d35a9dcf6c7d3e917bc5ac953571ec9fffa4ed22d8f872e2a74a10258cccac10b7cd49878c8c2e15a3c + languageName: node + linkType: hard + +"@cspell/cspell-resolver@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-resolver@npm:8.3.2" + dependencies: + global-directory: ^4.0.1 + checksum: fe723a1b8407b4168f6262cd30a97ad04eb5eb40f4cac3a11c6bf674ed67cc08525b0282b95edbcaf1d014dec3880f6f01b3f49e4319ade5b8e169f0a910f8d1 + languageName: node + linkType: hard + +"@cspell/cspell-service-bus@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-service-bus@npm:8.3.2" + checksum: 9bec7ddafcd8acab743248eb547fa5a84cf5363b0d0da354fdb2a0a6268a516b3600c7034ea71b6fca1c2517818f939876d961a4a3fc73a175eb62f5cbb6b7ae + languageName: node + linkType: hard + +"@cspell/cspell-types@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/cspell-types@npm:8.3.2" + checksum: 5f0b038e6717a3fa9c4109845c94e2a3d561a8b1a5dac073535929dee81e4d6d18af96759135a4dab6b06529a86ab1bd4e6b4a70121561bf22dfe4508140a5c3 + languageName: node + linkType: hard + +"@cspell/dict-ada@npm:^4.0.2": + version: 4.0.2 + resolution: "@cspell/dict-ada@npm:4.0.2" + checksum: 847729d40022db4df698aa9511c6b9073954f71268b64ad4fa354e6ac3eb5b03486bcb566ecadd4bccbebb4f188752eff2b2bdd9021b58dbf2cd61cd6a426752 + languageName: node + linkType: hard + +"@cspell/dict-aws@npm:^4.0.1": + version: 4.0.1 + resolution: "@cspell/dict-aws@npm:4.0.1" + checksum: 8445468151205fdfc51993ebc329e3a210a1628069971f973fb87c4d32cbb33676b32a370021efe0863ef63414632c8edf025ba7d296e932a93f3b05ba6193fc + languageName: node + linkType: hard + +"@cspell/dict-bash@npm:^4.1.3": + version: 4.1.3 + resolution: "@cspell/dict-bash@npm:4.1.3" + checksum: 4ba66c76c144d4c7ea1dd0fb92dfb0d7fd1e43a106a73fc7e9010b4a5c276aa4ef791c7161f56bf911356e3667ba043ee63271c1ffc485d9f8712553770e3ea9 + languageName: node + linkType: hard + +"@cspell/dict-companies@npm:^3.0.29": + version: 3.0.29 + resolution: 
"@cspell/dict-companies@npm:3.0.29" + checksum: ba2ee3fc7942ff932316038dc2a59b0edcf48321d9f31b75e1858637bd51be9b00bdafc2f3b70fa9d7e78cfbd016e08ef782cdf50bfc76b51e1bf22b1340e286 + languageName: node + linkType: hard + +"@cspell/dict-cpp@npm:^5.0.10": + version: 5.1.1 + resolution: "@cspell/dict-cpp@npm:5.1.1" + checksum: 2991434d97882c884ebef48d700ed80f1d0136b5dda9f97f49763874ba061103336fa97c07581ca73f23176030702778a93b43af49e73899f027c2b3263cdb6e + languageName: node + linkType: hard + +"@cspell/dict-cryptocurrencies@npm:^5.0.0": + version: 5.0.0 + resolution: "@cspell/dict-cryptocurrencies@npm:5.0.0" + checksum: 116e7f117b59ea4c9fa7ae1c3b47fc963e050448d43e059fb93731a256881ee262420edd5b9701ffe88af3d5e95b0337fc99b4dde1b0283ee0aaed45b23e281e + languageName: node + linkType: hard + +"@cspell/dict-csharp@npm:^4.0.2": + version: 4.0.2 + resolution: "@cspell/dict-csharp@npm:4.0.2" + checksum: d2ecb2aada51c5f0d6d557fd4f0c6eddb5b299e0955e066c49cd2afe96a1c6fe0afde699fdb885dd3183603a1efbd1d793b6a490b8d039256445b4b154b7375b + languageName: node + linkType: hard + +"@cspell/dict-css@npm:^4.0.12": + version: 4.0.12 + resolution: "@cspell/dict-css@npm:4.0.12" + checksum: 208c9434b8f5c8a33a96bb087572c10d5c946cd0847b9439271d0c4d2dcde5ee2588aca73bfea0c868d0124731b3ca890fab4762724d16435f161d4d5e7f3b9b + languageName: node + linkType: hard + +"@cspell/dict-dart@npm:^2.0.3": + version: 2.0.3 + resolution: "@cspell/dict-dart@npm:2.0.3" + checksum: 66bfcfa029baacd0b14b3ff5b6ab7597cf9459f77185d88b25123b42a4babb66df6786806843f1b6506c335326100599a2e1db6e6104e66bd021ede9ccb3cec4 + languageName: node + linkType: hard + +"@cspell/dict-data-science@npm:^1.0.11": + version: 1.0.11 + resolution: "@cspell/dict-data-science@npm:1.0.11" + checksum: 513f8f416f584f46576d45be23a4aa354e46d244f10a3d466222ffc13afe475e676639e4a24ab3a1ba157239f9ce23f7eef59c9f4c7a877a044db3a6344b18c6 + languageName: node + linkType: hard + +"@cspell/dict-django@npm:^4.1.0": + version: 4.1.0 + resolution: 
"@cspell/dict-django@npm:4.1.0" + checksum: b8a66135525e235bd6f2ea02de84ac7eae78e1068418f36b0c2260f9516b72492ef73f3fdc5fe8db2a6933747ff45a3eb743423a7dbf5b74548b1b1f30792679 + languageName: node + linkType: hard + +"@cspell/dict-docker@npm:^1.1.7": + version: 1.1.7 + resolution: "@cspell/dict-docker@npm:1.1.7" + checksum: 307f8b5132edca7cd291ba0ab6ed88f8787df984d6a42401b12a0da1ecb935d50af3a108ede885ce5bede96c445acdc88bb9ea8396de151c565a90a3bf66853e + languageName: node + linkType: hard + +"@cspell/dict-dotnet@npm:^5.0.0": + version: 5.0.0 + resolution: "@cspell/dict-dotnet@npm:5.0.0" + checksum: 3e55abd3cc0ecb0924caa245b83595c8e90b42a8fb438f3294d06ad32d30f3235dc8943a2865f06eaec5285a8d6a7df1db71fb228753d56a678a0f0cff87c24c + languageName: node + linkType: hard + +"@cspell/dict-elixir@npm:^4.0.3": + version: 4.0.3 + resolution: "@cspell/dict-elixir@npm:4.0.3" + checksum: f084449b2de5a2fa08076ac699c6073beaa4bb43796a662d681ea8fe5cba31f9efe718f3f98ef432ba75d4ea574316de34ab8422f79f4f2022cfddee7a7b8653 + languageName: node + linkType: hard + +"@cspell/dict-en-common-misspellings@npm:^2.0.0": + version: 2.0.0 + resolution: "@cspell/dict-en-common-misspellings@npm:2.0.0" + checksum: 977aac18d737d88e4cdca0771b664078a2d8cde1a0313696882581e67cb1acbf1f6a3923c1ee3f05878cfe3ad6f063a2e451f33e7df61cd1e0eb5de425bb4f2d + languageName: node + linkType: hard + +"@cspell/dict-en-gb@npm:1.1.33": + version: 1.1.33 + resolution: "@cspell/dict-en-gb@npm:1.1.33" + checksum: 09a9e7a3ee4cad75c87cc7adf6b5981b3ec52d4e3707e8de2e1a2a55cd5c8539057a7742d9c7035e23eb0aeff80a95b9599696c7192c9b3b9d8f14440fe01938 + languageName: node + linkType: hard + +"@cspell/dict-en_us@npm:^4.3.13": + version: 4.3.13 + resolution: "@cspell/dict-en_us@npm:4.3.13" + checksum: 4fc0d8458f3760a436956870742207214c73ae97da294117cae4fa8cf174b40dd6d237f8cb5d1bd4d8f82e3bfbd98cf885a9d79da1823f354d92a2724090a4f2 + languageName: node + linkType: hard + +"@cspell/dict-filetypes@npm:^3.0.3": + version: 3.0.3 + resolution: 
"@cspell/dict-filetypes@npm:3.0.3" + checksum: 22c38a0b2e98d6223b364ddb5948d14bf6427c8286d4ddb111d5da9bdd4c47ddc0c9199a575c314142da9aefcaa5777a4ea33ac07f239cb4b9b303e4bd888aa1 + languageName: node + linkType: hard + +"@cspell/dict-fonts@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-fonts@npm:4.0.0" + checksum: 7e33e4b39fb071165d81920dd0ccc07e4b737a7b09522acf3781b26136526e445e03e456caaecff261d76b711196b84cff7c21293853bf00ebe93f2e64c42520 + languageName: node + linkType: hard + +"@cspell/dict-fsharp@npm:^1.0.1": + version: 1.0.1 + resolution: "@cspell/dict-fsharp@npm:1.0.1" + checksum: ce0df20704bf95d1fe434d2889cc764279cbce2b057fc5247be1ccaf7a8cc57372de3da2cdab6643b3df5221119716929b2e2aaad3f60533dcf0bd3c7d892fab + languageName: node + linkType: hard + +"@cspell/dict-fullstack@npm:^3.1.5": + version: 3.1.5 + resolution: "@cspell/dict-fullstack@npm:3.1.5" + checksum: 01c98a3408d4bf4832f1f110252399e663ce869bb097d681558828bb0e22725c7fe7b43077aa57afc2c3158515eaa744074826c020825af5856a0950219785a6 + languageName: node + linkType: hard + +"@cspell/dict-gaming-terms@npm:^1.0.4": + version: 1.0.4 + resolution: "@cspell/dict-gaming-terms@npm:1.0.4" + checksum: 3e57f5567747a8598b3e4de4f63a3b8090cccf7688f3e91f0a9e005e916645db1224ea600afd5b497b7e8c6a1f9291dfd4cb932278dfd423657107203a2ace0b + languageName: node + linkType: hard + +"@cspell/dict-git@npm:^3.0.0": + version: 3.0.0 + resolution: "@cspell/dict-git@npm:3.0.0" + checksum: 97b6da58c93108bae0867515f790d84728f0bce580cc8ad6f0f5f63b2c81eaf6d084d543d99b693ff4d7fbea2413ff068c3e4811fc107820d243da2c06d381fa + languageName: node + linkType: hard + +"@cspell/dict-golang@npm:^6.0.5": + version: 6.0.5 + resolution: "@cspell/dict-golang@npm:6.0.5" + checksum: 20bf2c6a23d26f23e39629f3a48c31c2993d126b03ca31892e4e03ed48d2f5d5d929675987df54b6dad95828f6baa080111167e81a2dc3836c1f5b0b6db04a56 + languageName: node + linkType: hard + +"@cspell/dict-haskell@npm:^4.0.1": + version: 4.0.1 + resolution: "@cspell/dict-haskell@npm:4.0.1" 
+ checksum: cfb51e415b60c5eb266a5782d0a4b19a37f1389b9b018d1bbb2ff4358bd739af1f76f68f26a138d4b4bd0ab67146d6eb9032fc3d3c212695237c134e05339c79 + languageName: node + linkType: hard + +"@cspell/dict-html-symbol-entities@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-html-symbol-entities@npm:4.0.0" + checksum: 79f05f9080f39dbde703980eb587ed6624b8fc2f5cedc297327bc1b9b7e6022a7c382e6013149b1afe00609b96003ab5c8d18d378979f76f336ab626317183f4 + languageName: node + linkType: hard + +"@cspell/dict-html@npm:^4.0.5": + version: 4.0.5 + resolution: "@cspell/dict-html@npm:4.0.5" + checksum: 2273e77cad6f373c4b0a43c5fb707725ff7c845e6de3545c8b05cbb2d82e1205f004a817498f561ced3d8d8b1d15848a82f2a667c0b64a1ee46cbde67d8ac136 + languageName: node + linkType: hard + +"@cspell/dict-java@npm:^5.0.6": + version: 5.0.6 + resolution: "@cspell/dict-java@npm:5.0.6" + checksum: 7d5df7831e8a4c0295ebbcfdf9a3185b197f1aa60ec85f7edf4aaec4cc1c53f37a291f3f95543642951d21ff487285f5cfb6e853b7f96f514a35052d59252909 + languageName: node + linkType: hard + +"@cspell/dict-k8s@npm:^1.0.2": + version: 1.0.2 + resolution: "@cspell/dict-k8s@npm:1.0.2" + checksum: 4afd7806033b2bf71b17f4cf3fbc33449492bfb2a33a8301cc97b2e55583c07a4a07c288f50f445261c1de4b95494e495b8b982ca428d285393f7eb917bb5a61 + languageName: node + linkType: hard + +"@cspell/dict-latex@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-latex@npm:4.0.0" + checksum: 33a3f158d8c0151cbb4e6bd79ba1189d167b3916e1ce37d7b5754d18dffefe061320fa54c3cb482bd5c7cf37392d0112530b07a3eca63dffbe1069de317dc652 + languageName: node + linkType: hard + +"@cspell/dict-lorem-ipsum@npm:^4.0.0": + version: 4.0.0 + resolution: "@cspell/dict-lorem-ipsum@npm:4.0.0" + checksum: d3575fb7b9684480192d2cd647484312c555f3d1215d6b35371b70de3ecde4273010e5916cc2d130ff1e1223a1a49f75825651671a76d3dabdec98acf67a3902 + languageName: node + linkType: hard + +"@cspell/dict-lua@npm:^4.0.3": + version: 4.0.3 + resolution: "@cspell/dict-lua@npm:4.0.3" + checksum: 
eee20135a4f0620302c5feeb50485f59af93c24b64eb2081a9e2096a106ce33ae565d6d92607582b44c9f17c8a091d82fbcb443ebae9b77f8512b0d66a703c3b + languageName: node + linkType: hard + +"@cspell/dict-makefile@npm:^1.0.0": + version: 1.0.0 + resolution: "@cspell/dict-makefile@npm:1.0.0" + checksum: f0cac4caf31e27accd5df5e0c2f53097cccbbd085126c4b4ecc08be2a32bd7f89fe6b052e9eae4ec99843175cafa94561868271fb53c5389f27cc078565b5123 + languageName: node + linkType: hard + +"@cspell/dict-node@npm:^4.0.3": + version: 4.0.3 + resolution: "@cspell/dict-node@npm:4.0.3" + checksum: 178e7f3ab45f30722cae7354803dd98ea6577c025a11eda9362fa795a06dd8e934f833bfc7d46816617974822ace11217505a1bd0ea2955aaee92cf94cc6b127 + languageName: node + linkType: hard + +"@cspell/dict-npm@npm:^5.0.14": + version: 5.0.14 + resolution: "@cspell/dict-npm@npm:5.0.14" + checksum: 791a23e8b6a726d77a9098f25b582e6a511be699f878526a036f9f0ec08ccf0c2082ec53a5ee056b59d3a22fe5f21e8e3d11e5c839a10e17cb919c8c71f74537 + languageName: node + linkType: hard + +"@cspell/dict-php@npm:^4.0.5": + version: 4.0.5 + resolution: "@cspell/dict-php@npm:4.0.5" + checksum: 017375f8a08f93872672b9d14c885b3114ea1b82cb8a4375029c7e71dcb303b82bf684fc84df3cbd8d74b9d1e74c80234280605ef8ca26a7972d99bbf15b14f9 + languageName: node + linkType: hard + +"@cspell/dict-powershell@npm:^5.0.3": + version: 5.0.3 + resolution: "@cspell/dict-powershell@npm:5.0.3" + checksum: 18eac3be8545b3df110bf867bd6285b11d7e67da037e00c9bc1376c5e322092bc1d925375a09df8b7420a6a35847aa20558610ffb491763eb82949f3af764e1d + languageName: node + linkType: hard + +"@cspell/dict-public-licenses@npm:^2.0.5": + version: 2.0.5 + resolution: "@cspell/dict-public-licenses@npm:2.0.5" + checksum: 07e647c24ed1a5f0e88828264581e9f8fde179f776a50c4389ac0bc5c09a2bb94280d299c94d1884a8da01fbf112c5640178789b457f06b1a97414b18cda99e0 + languageName: node + linkType: hard + +"@cspell/dict-python@npm:^4.1.11": + version: 4.1.11 + resolution: "@cspell/dict-python@npm:4.1.11" + dependencies: + 
"@cspell/dict-data-science": ^1.0.11 + checksum: cdfc493d3a260887dc31b115ab880849895522c4a0c0d4fe379a2e42ebf41f0724f02f778629e3e972fc6fa93e6d7bc7892e4b26b3b6fbf72399cf5d3b67585b + languageName: node + linkType: hard + +"@cspell/dict-r@npm:^2.0.1": + version: 2.0.1 + resolution: "@cspell/dict-r@npm:2.0.1" + checksum: fe85939ad4c8ada34284a673918be711cca60b6d6f1c48ee98602c27905228dfbaea3462a350094633032c1d6b6bba9548df7019e0b21673cf1cf887c57ca228 + languageName: node + linkType: hard + +"@cspell/dict-ruby@npm:^5.0.2": + version: 5.0.2 + resolution: "@cspell/dict-ruby@npm:5.0.2" + checksum: c2006bcc808448b1eef146eb4b6b74388113c50334206191a9fe5817fb13669482ecd114f7bbd397562ad2e19a9683266ff396f48c6ce282f6445c2cfa8e82c7 + languageName: node + linkType: hard + +"@cspell/dict-rust@npm:^4.0.1": + version: 4.0.1 + resolution: "@cspell/dict-rust@npm:4.0.1" + checksum: 146d3af5d0b1b84ec62059353416cd5d4b53995ed0a0edb47b96ed89f1b8b82881e76c1bac46311318f41d1619eab87d81e0cdc94855f50b79cfa0719333cbb1 + languageName: node + linkType: hard + +"@cspell/dict-scala@npm:^5.0.0": + version: 5.0.0 + resolution: "@cspell/dict-scala@npm:5.0.0" + checksum: 759dd8746e68e45299b65eeaf1dfd32d1e345fd80fd9a623af502266598c384198853001f70a700c454d8490fb9a5e1358ca1e0d7c0d43154a4f07e2d5531c72 + languageName: node + linkType: hard + +"@cspell/dict-software-terms@npm:^3.3.15": + version: 3.3.16 + resolution: "@cspell/dict-software-terms@npm:3.3.16" + checksum: 85ae16ecb3d4594da37d199113fdf3b77038cfe8d3b9dbaea248c597e484e45a9fcf42d4c234d4548911caace037f01d5f8442a787348dddea5777c8b023d441 + languageName: node + linkType: hard + +"@cspell/dict-sql@npm:^2.1.3": + version: 2.1.3 + resolution: "@cspell/dict-sql@npm:2.1.3" + checksum: a435812cc697d4c453f11efa49962992150702518e49808381ea34548b8a8ed81432a10cca36682007912b013c28e9ce3c6c183341c6cde58c8af0eef25cddc3 + languageName: node + linkType: hard + +"@cspell/dict-svelte@npm:^1.0.2": + version: 1.0.2 + resolution: "@cspell/dict-svelte@npm:1.0.2" + checksum: 
5b42989bc6743a26ca5172cc23ebc1449d930695b10c908376048ce1835bf57fef7a0004f02ec5e43219f24a97f154e125041df470441199a045ed0be9e654fc + languageName: node + linkType: hard + +"@cspell/dict-swift@npm:^2.0.1": + version: 2.0.1 + resolution: "@cspell/dict-swift@npm:2.0.1" + checksum: 0bbb106266205c5f5e12886a73ebf0db2078bab1bdd2e1f304fe28445cd72d847a4c5072bf4fe8f9e8cdb4bc69d52fffec0806aea19ea9b64b7a87c67ee01175 + languageName: node + linkType: hard + +"@cspell/dict-typescript@npm:^3.1.2": + version: 3.1.2 + resolution: "@cspell/dict-typescript@npm:3.1.2" + checksum: 3cd0fa39856002975cf05b5584de42f58700a3420bd08b7b073af87dbcc1f66cab45e36a2156dd4e6c926cf67baaa192f02ccf61b9a9e5e94e69e035af54cec1 + languageName: node + linkType: hard + +"@cspell/dict-vue@npm:^3.0.0": + version: 3.0.0 + resolution: "@cspell/dict-vue@npm:3.0.0" + checksum: 4db58b1d6f9be1a523a35678877f2cca2bb04548b136ec5ec4e7186500978dbc32cc8747ced80ade3cad3acc3c80eb23afe980679165810f8f8f26802e952e2f + languageName: node + linkType: hard + +"@cspell/dynamic-import@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/dynamic-import@npm:8.3.2" + dependencies: + import-meta-resolve: ^4.0.0 + checksum: 176a5684922e9d3b3b277cdacbac14da509f691bd62da5135fab7c75db57f41c52ef0386c6ba3958e48dbc1b7b6f2751067c63bc1167ee09fae427c86dac71f7 + languageName: node + linkType: hard + +"@cspell/strong-weak-map@npm:8.3.2": + version: 8.3.2 + resolution: "@cspell/strong-weak-map@npm:8.3.2" + checksum: 696389c5ecb985bdded3530a088712ad03354051efc6506fae0328353a476604008a03364f3736de9041f51128b46ff6ffdda628c03c2c4800dc256a1b75e6ba + languageName: node + linkType: hard + "@cspotcode/source-map-support@npm:^0.8.0": version: 0.8.1 resolution: "@cspotcode/source-map-support@npm:0.8.1" @@ -1809,7 +2279,7 @@ __metadata: languageName: node linkType: hard -"@discoveryjs/json-ext@npm:0.5.7": +"@discoveryjs/json-ext@npm:0.5.7, @discoveryjs/json-ext@npm:^0.5.0": version: 0.5.7 resolution: "@discoveryjs/json-ext@npm:0.5.7" checksum: 
2176d301cc258ea5c2324402997cf8134ebb212469c0d397591636cea8d3c02f2b3cf9fd58dcb748c7a0dade77ebdc1b10284fa63e608c033a1db52fddc69918 @@ -3601,6 +4071,13 @@ __metadata: languageName: node linkType: hard +"@ltd/j-toml@npm:^1.38.0": + version: 1.38.0 + resolution: "@ltd/j-toml@npm:1.38.0" + checksum: 34f5d0ec652e790a7a733f0d3a8d9957d63997bd0efc13a61beb9d772bae75519453884fbc3fd6a2d5fe15674834bdd57ca1824bb1de8f829e5ce195fc5fa3ea + languageName: node + linkType: hard + "@mdn/browser-compat-data@npm:^4.0.0": version: 4.2.1 resolution: "@mdn/browser-compat-data@npm:4.2.1" @@ -3918,10 +4395,10 @@ __metadata: languageName: node linkType: hard -"@noir-lang/acvm_js@npm:0.35.0": - version: 0.35.0 - resolution: "@noir-lang/acvm_js@npm:0.35.0" - checksum: 0a2209f60242d9dbef31e72b738d70ff802da71f12d4b4f278144d373941f30844c8d041518970e71ba82eb9337cf4cc3a40fbd4b2c50acff45d84d2ee7dd435 +"@noir-lang/acvm_js@npm:0.38.0": + version: 0.38.0 + resolution: "@noir-lang/acvm_js@npm:0.38.0" + checksum: 42a5bba45135d1df0d0eb3f7b65439733e016580bad610e859e140638d42200d6b856ff11c4b30417b74ce011da7c39861aafb1c5b8c7211de2172aea449c635 languageName: node linkType: hard @@ -3943,14 +4420,14 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/backend_barretenberg@npm:^0.19.2": - version: 0.19.4 - resolution: "@noir-lang/backend_barretenberg@npm:0.19.4" +"@noir-lang/backend_barretenberg@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/backend_barretenberg@npm:0.22.0" dependencies: "@aztec/bb.js": 0.16.0 - "@noir-lang/types": 0.19.4 + "@noir-lang/types": 0.22.0 fflate: ^0.8.0 - checksum: b84382e71f29f0e10415f8c3a68eb7bb0653f56df1b3e8afd66c22df7adc950fb9016124f32253abad3d05583bccf26b1d0c0cabc601218083fd27d0601a3c02 + checksum: ead456218ba61d925e0fc5b47d1b94272e980b44a220f1262fb6cdc73cff7cd4232ddc69dd67bb21e50f0b43e7696d4a96fde15e3eadc0bf223ec6d59e014e23 languageName: node linkType: hard @@ -3958,7 +4435,7 @@ __metadata: version: 0.0.0-use.local resolution: 
"@noir-lang/backend_barretenberg@workspace:tooling/noir_js_backend_barretenberg" dependencies: - "@aztec/bb.js": 0.16.0 + "@aztec/bb.js": 0.19.0 "@noir-lang/types": "workspace:*" "@types/node": ^20.6.2 "@types/prettier": ^3 @@ -3980,7 +4457,6 @@ __metadata: "@noir-lang/noir_js": "workspace:*" "@noir-lang/types": "workspace:*" "@types/chai": ^4 - "@types/lodash": ^4 "@types/mocha": ^10.0.1 "@types/node": ^20.6.2 "@types/prettier": ^3 @@ -3988,7 +4464,6 @@ __metadata: eslint: ^8.50.0 eslint-plugin-prettier: ^5.0.0 glob: ^10.3.10 - lodash: ^4.17.21 mocha: ^10.2.0 prettier: 3.0.3 ts-command-line-args: ^2.5.1 @@ -4000,14 +4475,14 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/noir_js@npm:^0.19.2": - version: 0.19.4 - resolution: "@noir-lang/noir_js@npm:0.19.4" +"@noir-lang/noir_js@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/noir_js@npm:0.22.0" dependencies: - "@noir-lang/acvm_js": 0.35.0 - "@noir-lang/noirc_abi": 0.19.4 - "@noir-lang/types": 0.19.4 - checksum: 1ee6140019861cee3a27d511951e6b3ed629925a83328b0fbe18791a534c36d26d33ae3ca3aa25252fac8932dfeefedc0d5f07169d5d08f1485d6290965d2491 + "@noir-lang/acvm_js": 0.38.0 + "@noir-lang/noirc_abi": 0.22.0 + "@noir-lang/types": 0.22.0 + checksum: 3b0873ad87521415af11208bebe5690191d03fa06dcd515789f0a63f7641146cdcb01d292b208452856ea3967e196c8332cb2618e013f9e7e5ce7d6e09de043d languageName: node linkType: hard @@ -4034,12 +4509,10 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/noir_wasm@npm:^0.19.2": - version: 0.19.4 - resolution: "@noir-lang/noir_wasm@npm:0.19.4" - peerDependencies: - "@noir-lang/source-resolver": 0.19.4 - checksum: b69748240e27851f19b20b4ce1d0c57da7e9055b9d0e0c16f699338394912c00277e5586afcff77d3f6f08e17bbf1bc6253ff1ad156b8f4985c4435adad0ba49 +"@noir-lang/noir_wasm@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/noir_wasm@npm:0.22.0" + checksum: 
7ac0ca170bf312df761d7ccfd32a67a27f88f15ad4eed1807864295d761d3b2176ffb82f4c6931e1bc06b225d6f738519962c79ffbce9a33d5ef8a6a2bdea82c languageName: node linkType: hard @@ -4048,17 +4521,49 @@ __metadata: resolution: "@noir-lang/noir_wasm@workspace:compiler/wasm" dependencies: "@esm-bundle/chai": ^4.3.4-fix.0 + "@ltd/j-toml": ^1.38.0 + "@noir-lang/noirc_abi": "workspace:*" + "@types/adm-zip": ^0.5.0 + "@types/chai": ^4 + "@types/mocha": ^10.0.6 + "@types/mocha-each": ^2 + "@types/node": ^20.10.5 + "@types/pako": ^2 + "@types/path-browserify": ^1 + "@types/readable-stream": ^4 + "@types/sinon": ^17 + "@wasm-tool/wasm-pack-plugin": ^1.7.0 "@web/dev-server-esbuild": ^0.3.6 - "@web/test-runner": ^0.15.3 - "@web/test-runner-playwright": ^0.10.0 + "@web/test-runner": ^0.18.0 + "@web/test-runner-playwright": ^0.11.0 + adm-zip: ^0.5.0 + assert: ^2.1.0 + browserify-fs: ^1.0.0 + chai: ^4.3.10 + copy-webpack-plugin: ^11.0.0 + html-webpack-plugin: ^5.5.4 + memfs: ^4.6.0 mocha: ^10.2.0 + mocha-each: ^2.0.1 + pako: ^2.1.0 + path-browserify: ^1.0.1 + process: ^0.11.10 + readable-stream: ^4.4.2 + sinon: ^17.0.1 + ts-loader: ^9.5.1 + ts-node: ^10.9.1 + typescript: ~5.2.2 + unzipit: ^1.4.3 + url: ^0.11.3 + webpack: ^5.49.0 + webpack-cli: ^4.7.2 languageName: unknown linkType: soft -"@noir-lang/noirc_abi@npm:0.19.4": - version: 0.19.4 - resolution: "@noir-lang/noirc_abi@npm:0.19.4" - checksum: 7446127935c954ae1b2bfdfa6cf111a9c14a2024f13a2d7e7e0a612319b0b4cc199acdbceaa94168e3bd5d82760b9d2fd2c9eec6677d57ed76e7b666e2b28c7d +"@noir-lang/noirc_abi@npm:0.22.0": + version: 0.22.0 + resolution: "@noir-lang/noirc_abi@npm:0.22.0" + checksum: a250c6cc5ca37fcf02663f8d6b027776f0e58920fb8f8a84efcf74f079f235bb11bbad682ba332211d9b9a79b6a3eb7faede7701cd88582b682971a41ca6212d languageName: node linkType: hard @@ -4082,6 +4587,7 @@ __metadata: "@typescript-eslint/eslint-plugin": ^6.7.3 "@typescript-eslint/parser": ^6.7.3 chai: ^4.3.7 + cspell: ^8.3.2 eslint: ^8.50.0 eslint-plugin-prettier: ^5.0.0 mocha: 
^10.2.0 @@ -4091,19 +4597,12 @@ __metadata: languageName: unknown linkType: soft -"@noir-lang/source-resolver@npm:^0.19.2": - version: 0.19.4 - resolution: "@noir-lang/source-resolver@npm:0.19.4" - checksum: f0e51bb54daa1197894a17e5d0d125481aa0872c49c0d955feec2629b8ff51b9b48144778def6539a87b723a9b4c5eacc79d6eb935f0da6a1893d8d403689683 - languageName: node - linkType: hard - -"@noir-lang/types@npm:0.19.4, @noir-lang/types@npm:^0.19.2": - version: 0.19.4 - resolution: "@noir-lang/types@npm:0.19.4" +"@noir-lang/types@npm:0.22.0, @noir-lang/types@npm:^0.22.0": + version: 0.22.0 + resolution: "@noir-lang/types@npm:0.22.0" dependencies: - "@noir-lang/noirc_abi": 0.19.4 - checksum: 1d0ce99895c0bc98e9daf67028132f71e30f60b2dc0799ee99f311d917e84459a0d2a37c318f5ab14e594e7dd26f5e612e5fd9a7e0a20a692a7017d7c883bb4d + "@noir-lang/noirc_abi": 0.22.0 + checksum: 5dd1badf0449c518e755172de1d2f2c1b95bfaf7b7328b7de00b8ce9ba68bd447ca65e827185da7d737e7e88dcaf296b29687ffe2e1f5b4d5cc31ce3e3b4f208 languageName: node linkType: hard @@ -4517,22 +5016,63 @@ __metadata: languageName: node linkType: hard -"@rollup/plugin-node-resolve@npm:^13.0.4": - version: 13.3.0 - resolution: "@rollup/plugin-node-resolve@npm:13.3.0" +"@puppeteer/browsers@npm:1.4.6": + version: 1.4.6 + resolution: "@puppeteer/browsers@npm:1.4.6" dependencies: - "@rollup/pluginutils": ^3.1.0 - "@types/resolve": 1.17.1 - deepmerge: ^4.2.2 - is-builtin-module: ^3.1.0 - is-module: ^1.0.0 - resolve: ^1.19.0 + debug: 4.3.4 + extract-zip: 2.0.1 + progress: 2.0.3 + proxy-agent: 6.3.0 + tar-fs: 3.0.4 + unbzip2-stream: 1.4.3 + yargs: 17.7.1 + peerDependencies: + typescript: ">= 4.7.4" + peerDependenciesMeta: + typescript: + optional: true + bin: + browsers: lib/cjs/main-cli.js + checksum: 29569dd8a8a41737bb0dd40cce6279cfc8764afc6242d2f9d8ae610bed7e466fc77eeb27b9b3ac53dd04927a1a0e26389f282f6ba057210979b36ab455009d64 + languageName: node + linkType: hard + +"@rollup/plugin-node-resolve@npm:^13.0.4": + version: 13.3.0 + resolution: 
"@rollup/plugin-node-resolve@npm:13.3.0" + dependencies: + "@rollup/pluginutils": ^3.1.0 + "@types/resolve": 1.17.1 + deepmerge: ^4.2.2 + is-builtin-module: ^3.1.0 + is-module: ^1.0.0 + resolve: ^1.19.0 peerDependencies: rollup: ^2.42.0 checksum: ec5418e6b3c23a9e30683056b3010e9d325316dcfae93fbc673ae64dad8e56a2ce761c15c48f5e2dcfe0c822fdc4a4905ee6346e3dcf90603ba2260afef5a5e6 languageName: node linkType: hard +"@rollup/plugin-node-resolve@npm:^15.0.1": + version: 15.2.3 + resolution: "@rollup/plugin-node-resolve@npm:15.2.3" + dependencies: + "@rollup/pluginutils": ^5.0.1 + "@types/resolve": 1.20.2 + deepmerge: ^4.2.2 + is-builtin-module: ^3.2.1 + is-module: ^1.0.0 + resolve: ^1.22.1 + peerDependencies: + rollup: ^2.78.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 730f32c2f8fdddff07cf0fca86a5dac7c475605fb96930197a868c066e62eb6388c557545e4f7d99b7a283411754c9fbf98944ab086b6074e04fc1292e234aa8 + languageName: node + linkType: hard + "@rollup/pluginutils@npm:^3.1.0": version: 3.1.0 resolution: "@rollup/pluginutils@npm:3.1.0" @@ -4546,6 +5086,113 @@ __metadata: languageName: node linkType: hard +"@rollup/pluginutils@npm:^5.0.1": + version: 5.1.0 + resolution: "@rollup/pluginutils@npm:5.1.0" + dependencies: + "@types/estree": ^1.0.0 + estree-walker: ^2.0.2 + picomatch: ^2.3.1 + peerDependencies: + rollup: ^1.20.0||^2.0.0||^3.0.0||^4.0.0 + peerDependenciesMeta: + rollup: + optional: true + checksum: 3cc5a6d91452a6eabbfd1ae79b4dd1f1e809d2eecda6e175deb784e75b0911f47e9ecce73f8dd315d6a8b3f362582c91d3c0f66908b6ced69345b3cbe28f8ce8 + languageName: node + linkType: hard + +"@rollup/rollup-android-arm-eabi@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-android-arm-eabi@npm:4.9.4" + conditions: os=android & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-android-arm64@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-android-arm64@npm:4.9.4" + conditions: os=android & cpu=arm64 + languageName: node + linkType: 
hard + +"@rollup/rollup-darwin-arm64@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-darwin-arm64@npm:4.9.4" + conditions: os=darwin & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-darwin-x64@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-darwin-x64@npm:4.9.4" + conditions: os=darwin & cpu=x64 + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm-gnueabihf@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.9.4" + conditions: os=linux & cpu=arm + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-gnu@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.9.4" + conditions: os=linux & cpu=arm64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-arm64-musl@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-arm64-musl@npm:4.9.4" + conditions: os=linux & cpu=arm64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-linux-riscv64-gnu@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.9.4" + conditions: os=linux & cpu=riscv64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-gnu@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-x64-gnu@npm:4.9.4" + conditions: os=linux & cpu=x64 & libc=glibc + languageName: node + linkType: hard + +"@rollup/rollup-linux-x64-musl@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-linux-x64-musl@npm:4.9.4" + conditions: os=linux & cpu=x64 & libc=musl + languageName: node + linkType: hard + +"@rollup/rollup-win32-arm64-msvc@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.9.4" + conditions: os=win32 & cpu=arm64 + languageName: node + linkType: hard + +"@rollup/rollup-win32-ia32-msvc@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.9.4" + conditions: os=win32 & cpu=ia32 + languageName: node + linkType: hard + 
+"@rollup/rollup-win32-x64-msvc@npm:4.9.4": + version: 4.9.4 + resolution: "@rollup/rollup-win32-x64-msvc@npm:4.9.4" + conditions: os=win32 & cpu=x64 + languageName: node + linkType: hard + "@scure/base@npm:~1.1.0": version: 1.1.3 resolution: "@scure/base@npm:1.1.3" @@ -4679,16 +5326,15 @@ __metadata: languageName: node linkType: hard -"@signorecello/noir_playground@npm:^0.6.0": - version: 0.6.0 - resolution: "@signorecello/noir_playground@npm:0.6.0" +"@signorecello/noir_playground@npm:^0.7.0": + version: 0.7.0 + resolution: "@signorecello/noir_playground@npm:0.7.0" dependencies: "@monaco-editor/react": 4.6.0 - "@noir-lang/backend_barretenberg": ^0.19.2 - "@noir-lang/noir_js": ^0.19.2 - "@noir-lang/noir_wasm": ^0.19.2 - "@noir-lang/source-resolver": ^0.19.2 - "@noir-lang/types": ^0.19.2 + "@noir-lang/backend_barretenberg": ^0.22.0 + "@noir-lang/noir_js": ^0.22.0 + "@noir-lang/noir_wasm": ^0.22.0 + "@noir-lang/types": ^0.22.0 fflate: ^0.8.1 js-base64: ^3.7.5 monaco-editor: ^0.44.0 @@ -4696,7 +5342,7 @@ __metadata: monaco-textmate: ^3.0.1 onigasm: ^2.2.5 react-toastify: ^9.1.3 - checksum: a1fa6f5b6aded80ad8ecdf8841dbbcd70e2a3faa479bc39f8077d3e96d65ceab53488f278db1a991a6cc8c6ce63206cc1627fe87e683187941359251e67c48ed + checksum: 360bd1dbc8964a6ab8a6e8d0eb0cd11d7446cc23bf63c253083b18b5d6d5ccf2ec6ca847614106cd93490bb815aac651a6e4584ac63ea0fda182cdb1aadf3f45 languageName: node linkType: hard @@ -4728,6 +5374,60 @@ __metadata: languageName: node linkType: hard +"@sinonjs/commons@npm:^2.0.0": + version: 2.0.0 + resolution: "@sinonjs/commons@npm:2.0.0" + dependencies: + type-detect: 4.0.8 + checksum: 5023ba17edf2b85ed58262313b8e9b59e23c6860681a9af0200f239fe939e2b79736d04a260e8270ddd57196851dde3ba754d7230be5c5234e777ae2ca8af137 + languageName: node + linkType: hard + +"@sinonjs/commons@npm:^3.0.0": + version: 3.0.0 + resolution: "@sinonjs/commons@npm:3.0.0" + dependencies: + type-detect: 4.0.8 + checksum: 
b4b5b73d4df4560fb8c0c7b38c7ad4aeabedd362f3373859d804c988c725889cde33550e4bcc7cd316a30f5152a2d1d43db71b6d0c38f5feef71fd8d016763f8 + languageName: node + linkType: hard + +"@sinonjs/fake-timers@npm:^10.0.2": + version: 10.3.0 + resolution: "@sinonjs/fake-timers@npm:10.3.0" + dependencies: + "@sinonjs/commons": ^3.0.0 + checksum: 614d30cb4d5201550c940945d44c9e0b6d64a888ff2cd5b357f95ad6721070d6b8839cd10e15b76bf5e14af0bcc1d8f9ec00d49a46318f1f669a4bec1d7f3148 + languageName: node + linkType: hard + +"@sinonjs/fake-timers@npm:^11.2.2": + version: 11.2.2 + resolution: "@sinonjs/fake-timers@npm:11.2.2" + dependencies: + "@sinonjs/commons": ^3.0.0 + checksum: 68c29b0e1856fdc280df03ddbf57c726420b78e9f943a241b471edc018fb14ff36fdc1daafd6026cba08c3c7f50c976fb7ae11b88ff44cd7f609692ca7d25158 + languageName: node + linkType: hard + +"@sinonjs/samsam@npm:^8.0.0": + version: 8.0.0 + resolution: "@sinonjs/samsam@npm:8.0.0" + dependencies: + "@sinonjs/commons": ^2.0.0 + lodash.get: ^4.4.2 + type-detect: ^4.0.8 + checksum: 95e40d0bb9f7288e27c379bee1b03c3dc51e7e78b9d5ea6aef66a690da7e81efc4715145b561b449cefc5361a171791e3ce30fb1a46ab247d4c0766024c60a60 + languageName: node + linkType: hard + +"@sinonjs/text-encoding@npm:^0.7.1": + version: 0.7.2 + resolution: "@sinonjs/text-encoding@npm:0.7.2" + checksum: fe690002a32ba06906cf87e2e8fe84d1590294586f2a7fd180a65355b53660c155c3273d8011a5f2b77209b819aa7306678ae6e4aea0df014bd7ffd4bbbcf1ab + languageName: node + linkType: hard + "@slorber/remark-comment@npm:^1.0.0": version: 1.0.0 resolution: "@slorber/remark-comment@npm:1.0.0" @@ -4924,6 +5624,13 @@ __metadata: languageName: node linkType: hard +"@tootallnate/quickjs-emscripten@npm:^0.23.0": + version: 0.23.0 + resolution: "@tootallnate/quickjs-emscripten@npm:0.23.0" + checksum: c350a2947ffb80b22e14ff35099fd582d1340d65723384a0fd0515e905e2534459ad2f301a43279a37308a27c99273c932e64649abd57d0bb3ca8c557150eccc + languageName: node + linkType: hard + "@trysound/sax@npm:0.2.0": version: 0.2.0 
resolution: "@trysound/sax@npm:0.2.0" @@ -4977,6 +5684,15 @@ __metadata: languageName: node linkType: hard +"@types/adm-zip@npm:^0.5.0": + version: 0.5.5 + resolution: "@types/adm-zip@npm:0.5.5" + dependencies: + "@types/node": "*" + checksum: 808c25b8a1c2e1c594cf9b1514e7953105cf96e19e38aa7dc109ff2537bda7345b950ef1f4e54a6e824e5503e29d24b0ff6d0aa1ff9bd4afb79ef0ef2df9ebab + languageName: node + linkType: hard + "@types/babel__code-frame@npm:^7.0.2": version: 7.0.6 resolution: "@types/babel__code-frame@npm:7.0.6" @@ -5144,7 +5860,7 @@ __metadata: languageName: node linkType: hard -"@types/estree@npm:*, @types/estree@npm:^1.0.0": +"@types/estree@npm:*, @types/estree@npm:1.0.5, @types/estree@npm:^1.0.0": version: 1.0.5 resolution: "@types/estree@npm:1.0.5" checksum: dd8b5bed28e6213b7acd0fb665a84e693554d850b0df423ac8076cc3ad5823a6bc26b0251d080bdc545af83179ede51dd3f6fa78cad2c46ed1f29624ddf3e41a @@ -5182,15 +5898,6 @@ __metadata: languageName: node linkType: hard -"@types/fs-extra@npm:^9.0.13": - version: 9.0.13 - resolution: "@types/fs-extra@npm:9.0.13" - dependencies: - "@types/node": "*" - checksum: add79e212acd5ac76b97b9045834e03a7996aef60a814185e0459088fd290519a3c1620865d588fa36c4498bf614210d2a703af5cf80aa1dbc125db78f6edac3 - languageName: node - linkType: hard - "@types/gtag.js@npm:^0.0.12": version: 0.0.12 resolution: "@types/gtag.js@npm:0.0.12" @@ -5340,13 +6047,6 @@ __metadata: languageName: node linkType: hard -"@types/lodash@npm:^4": - version: 4.14.202 - resolution: "@types/lodash@npm:4.14.202" - checksum: a91acf3564a568c6f199912f3eb2c76c99c5a0d7e219394294213b3f2d54f672619f0fde4da22b29dc5d4c31457cd799acc2e5cb6bd90f9af04a1578483b6ff7 - languageName: node - linkType: hard - "@types/lru-cache@npm:^5.1.0": version: 5.1.1 resolution: "@types/lru-cache@npm:5.1.1" @@ -5393,14 +6093,16 @@ __metadata: languageName: node linkType: hard -"@types/minimist@npm:^1.2.2": - version: 1.2.5 - resolution: "@types/minimist@npm:1.2.5" - checksum: 
477047b606005058ab0263c4f58097136268007f320003c348794f74adedc3166ffc47c80ec3e94687787f2ab7f4e72c468223946e79892cf0fd9e25e9970a90 +"@types/mocha-each@npm:^2": + version: 2.0.4 + resolution: "@types/mocha-each@npm:2.0.4" + dependencies: + "@types/mocha": "*" + checksum: 2588284db079e2d0a17735c8fb5c12cba9feabf2de55c9ab49e1f3b38cc522691d30ed3abb1bcb21c087b27f373e3f4123ef7bd8d9a4f95cef38f6c8045c71f3 languageName: node linkType: hard -"@types/mocha@npm:^10.0.1": +"@types/mocha@npm:*, @types/mocha@npm:^10.0.1, @types/mocha@npm:^10.0.6": version: 10.0.6 resolution: "@types/mocha@npm:10.0.6" checksum: f7c836cf6cf27dc0f5970d262591b56f2a3caeaec8cfdc612c12e1cfbb207f601f710ece207e935164d4e3343b93be5054d0db5544f31f453b3923775d82099f @@ -5453,12 +6155,19 @@ __metadata: languageName: node linkType: hard -"@types/node@npm:^18.7.20": - version: 18.19.3 - resolution: "@types/node@npm:18.19.3" +"@types/node@npm:^20.10.5": + version: 20.10.5 + resolution: "@types/node@npm:20.10.5" dependencies: undici-types: ~5.26.4 - checksum: 58c4fa45a78fcec75c78182a4b266395905957633654eb0311c5f9c30ac15c179ea2287ab1af034e46c2db7bb0589ef0000ee64c1de8f568a0aad29eaadb100c + checksum: e216b679f545a8356960ce985a0e53c3a58fff0eacd855e180b9e223b8db2b5bd07b744a002b8c1f0c37f9194648ab4578533b5c12df2ec10cc02f61d20948d2 + languageName: node + linkType: hard + +"@types/pako@npm:^2": + version: 2.0.3 + resolution: "@types/pako@npm:2.0.3" + checksum: 0746dd5d29eccf5b2e6cceb3ccb093851219e78bd2e2e20d25757e247987139e061e5d4ba37cb5295493f06e3c683c74f8876011cd8a3f3748a09244fbc841d9 languageName: node linkType: hard @@ -5483,6 +6192,13 @@ __metadata: languageName: node linkType: hard +"@types/path-browserify@npm:^1": + version: 1.0.2 + resolution: "@types/path-browserify@npm:1.0.2" + checksum: 7b26df1a32827f7e588009386f62e0f900e8d349722b1b8b8c100cd2c4cc1e95c64099f5ec879cd131e69869088f336998a3ddb17085b68312414431a4eace01 + languageName: node + linkType: hard + "@types/pbkdf2@npm:^3.0.0": version: 3.1.2 resolution: 
"@types/pbkdf2@npm:3.1.2" @@ -5515,13 +6231,6 @@ __metadata: languageName: node linkType: hard -"@types/ps-tree@npm:^1.1.2": - version: 1.1.6 - resolution: "@types/ps-tree@npm:1.1.6" - checksum: bf5b7bb9bd11b8762a8302b93c335728ecb19c85a74c640a3888d476368a03733f11612b9a87b1ad9ea56f95720db23a824c78113b16024dc59264b7f9008df5 - languageName: node - linkType: hard - "@types/qs@npm:*": version: 6.9.10 resolution: "@types/qs@npm:6.9.10" @@ -5589,6 +6298,16 @@ __metadata: languageName: node linkType: hard +"@types/readable-stream@npm:^4": + version: 4.0.10 + resolution: "@types/readable-stream@npm:4.0.10" + dependencies: + "@types/node": "*" + safe-buffer: ~5.1.1 + checksum: dc7cb95be737d442770fcaf1fbe3860c011f5f024c6f09c8b032f95b7ef9cce0031302715afe13e18d264b3a162e5838ce78873ec8d63eb238db49bc70906a21 + languageName: node + linkType: hard + "@types/resolve@npm:1.17.1": version: 1.17.1 resolution: "@types/resolve@npm:1.17.1" @@ -5598,6 +6317,13 @@ __metadata: languageName: node linkType: hard +"@types/resolve@npm:1.20.2": + version: 1.20.2 + resolution: "@types/resolve@npm:1.20.2" + checksum: 61c2cad2499ffc8eab36e3b773945d337d848d3ac6b7b0a87c805ba814bc838ef2f262fc0f109bfd8d2e0898ff8bd80ad1025f9ff64f1f71d3d4294c9f14e5f6 + languageName: node + linkType: hard + "@types/responselike@npm:^1.0.0": version: 1.0.3 resolution: "@types/responselike@npm:1.0.3" @@ -5676,6 +6402,22 @@ __metadata: languageName: node linkType: hard +"@types/sinon@npm:^17": + version: 17.0.2 + resolution: "@types/sinon@npm:17.0.2" + dependencies: + "@types/sinonjs__fake-timers": "*" + checksum: 3a56615f2dc7d0b67d3e4b8ae358df2ff2164d89fabb22e9b46e6afe7d4df844a354ea65d409af9baf29ac0103ea562ab44dd0176405a9cf82a4ff183939f22f + languageName: node + linkType: hard + +"@types/sinonjs__fake-timers@npm:*": + version: 8.1.5 + resolution: "@types/sinonjs__fake-timers@npm:8.1.5" + checksum: 7e3c08f6c13df44f3ea7d9a5155ddf77e3f7314c156fa1c5a829a4f3763bafe2f75b1283b887f06e6b4296996a2f299b70f64ff82625f9af5885436e2524d10c 
+ languageName: node + linkType: hard + "@types/sockjs@npm:^0.3.33": version: 0.3.36 resolution: "@types/sockjs@npm:0.3.36" @@ -5699,13 +6441,6 @@ __metadata: languageName: node linkType: hard -"@types/which@npm:^2.0.1": - version: 2.0.2 - resolution: "@types/which@npm:2.0.2" - checksum: 8626a3c2f6db676c449142e1082e33ea0c9d88b8a2bd796366b944891e6da0088b2aa83d3fa9c79e6696f7381a851fc76d43bd353eb6c4d98a7775b4ae0a96a5 - languageName: node - linkType: hard - "@types/ws@npm:^7.4.0": version: 7.4.7 resolution: "@types/ws@npm:7.4.7" @@ -5878,6 +6613,18 @@ __metadata: languageName: node linkType: hard +"@wasm-tool/wasm-pack-plugin@npm:^1.7.0": + version: 1.7.0 + resolution: "@wasm-tool/wasm-pack-plugin@npm:1.7.0" + dependencies: + chalk: ^2.4.1 + command-exists: ^1.2.7 + watchpack: ^2.1.1 + which: ^2.0.2 + checksum: b0a09f70827b262c4b4689296aae712cc7d9fb742e32eb7e7b720a5b45be1d256864c84c37c8f97de4c762cdae9c35be4a37cdea6832281056921edf4944aea4 + languageName: node + linkType: hard + "@web/browser-logs@npm:^0.2.6": version: 0.2.6 resolution: "@web/browser-logs@npm:0.2.6" @@ -5896,6 +6643,15 @@ __metadata: languageName: node linkType: hard +"@web/browser-logs@npm:^0.4.0": + version: 0.4.0 + resolution: "@web/browser-logs@npm:0.4.0" + dependencies: + errorstacks: ^2.2.0 + checksum: 65c6c4312b1ff00ff40fd15c07708f5e113bb8b4f87c972356010546a0664287dd4f64c5475f8be27b6abb29b9272ecc4162aada90ffb732f1779ceec5fd8ad6 + languageName: node + linkType: hard + "@web/config-loader@npm:^0.1.3": version: 0.1.3 resolution: "@web/config-loader@npm:0.1.3" @@ -5905,6 +6661,13 @@ __metadata: languageName: node linkType: hard +"@web/config-loader@npm:^0.3.0": + version: 0.3.1 + resolution: "@web/config-loader@npm:0.3.1" + checksum: 3ff87dc5cf44a98be97477b1d4c36673d282e7567b61e554aff56f8674a187938c51cd8045f868ca5f4b6ff4d52c72fbbc176c26f27df4335f2341d105d2b535 + languageName: node + linkType: hard + "@web/dev-server-core@npm:^0.4.1": version: 0.4.1 resolution: "@web/dev-server-core@npm:0.4.1" @@ 
-5957,6 +6720,32 @@ __metadata: languageName: node linkType: hard +"@web/dev-server-core@npm:^0.7.0": + version: 0.7.0 + resolution: "@web/dev-server-core@npm:0.7.0" + dependencies: + "@types/koa": ^2.11.6 + "@types/ws": ^7.4.0 + "@web/parse5-utils": ^2.1.0 + chokidar: ^3.4.3 + clone: ^2.1.2 + es-module-lexer: ^1.0.0 + get-stream: ^6.0.0 + is-stream: ^2.0.0 + isbinaryfile: ^5.0.0 + koa: ^2.13.0 + koa-etag: ^4.0.0 + koa-send: ^5.0.1 + koa-static: ^5.0.0 + lru-cache: ^8.0.4 + mime-types: ^2.1.27 + parse5: ^6.0.1 + picomatch: ^2.2.2 + ws: ^7.4.2 + checksum: 68c4440c75eca686626182af94d40f8ffa2e7848074692abd930ae69490af1866c5d10f13e8ad6745fb7ae386ba91377b4bcdbd74604a9600ce4ab2c8df1576c + languageName: node + linkType: hard + "@web/dev-server-esbuild@npm:^0.3.6": version: 0.3.6 resolution: "@web/dev-server-esbuild@npm:0.3.6" @@ -5984,6 +6773,20 @@ __metadata: languageName: node linkType: hard +"@web/dev-server-rollup@npm:^0.6.1": + version: 0.6.1 + resolution: "@web/dev-server-rollup@npm:0.6.1" + dependencies: + "@rollup/plugin-node-resolve": ^15.0.1 + "@web/dev-server-core": ^0.7.0 + nanocolors: ^0.2.1 + parse5: ^6.0.1 + rollup: ^4.4.0 + whatwg-url: ^11.0.0 + checksum: 59536e38d8519bc1edbf4e11adf36c4f5273c02f5fbc5171962cb7acadea2cfd8c44df057ff4422282f34964668280b3564f6b053bfee5327dc0368a382f2e00 + languageName: node + linkType: hard + "@web/dev-server@npm:^0.1.38": version: 0.1.38 resolution: "@web/dev-server@npm:0.1.38" @@ -6009,6 +6812,31 @@ __metadata: languageName: node linkType: hard +"@web/dev-server@npm:^0.4.0": + version: 0.4.1 + resolution: "@web/dev-server@npm:0.4.1" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/command-line-args": ^5.0.0 + "@web/config-loader": ^0.3.0 + "@web/dev-server-core": ^0.7.0 + "@web/dev-server-rollup": ^0.6.1 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + debounce: ^1.2.0 + deepmerge: ^4.2.2 + ip: ^1.1.5 + nanocolors: ^0.2.1 + open: ^8.0.2 + portfinder: ^1.0.32 + bin: + wds: dist/bin.js + 
web-dev-server: dist/bin.js + checksum: 2d9f2fcef4511e9965de24fef0ff4430a525ccc7bdc2069e51a96d1aaab4a50a935e32725dd128bb3641be4d6a2acb870f0d645208b59036bfd9d7f55c0a33a4 + languageName: node + linkType: hard + "@web/parse5-utils@npm:^1.3.1": version: 1.3.1 resolution: "@web/parse5-utils@npm:1.3.1" @@ -6019,7 +6847,7 @@ __metadata: languageName: node linkType: hard -"@web/parse5-utils@npm:^2.0.2": +"@web/parse5-utils@npm:^2.0.2, @web/parse5-utils@npm:^2.1.0": version: 2.1.0 resolution: "@web/parse5-utils@npm:2.1.0" dependencies: @@ -6041,6 +6869,19 @@ __metadata: languageName: node linkType: hard +"@web/test-runner-chrome@npm:^0.15.0": + version: 0.15.0 + resolution: "@web/test-runner-chrome@npm:0.15.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-coverage-v8": ^0.8.0 + async-mutex: 0.4.0 + chrome-launcher: ^0.15.0 + puppeteer-core: ^20.0.0 + checksum: 091aa83707aa1a6ade8074c37050f9a0fae2729f223b5e7d756f86ccdadcd85e738cc47d0a4ae8ac6ea930142cc20e341f5d3ad30a3a81d6666b353a7e8c2dd4 + languageName: node + linkType: hard + "@web/test-runner-commands@npm:^0.6.6": version: 0.6.6 resolution: "@web/test-runner-commands@npm:0.6.6" @@ -6051,6 +6892,16 @@ __metadata: languageName: node linkType: hard +"@web/test-runner-commands@npm:^0.9.0": + version: 0.9.0 + resolution: "@web/test-runner-commands@npm:0.9.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + mkdirp: ^1.0.4 + checksum: df226f76148c5967df68c2589549b10ffe75f3d34a31d63bea132447271cdf073de7350aa680fcbf4315737b909fc44faad23d9f8e7e3ce37e93e05e67a7f295 + languageName: node + linkType: hard + "@web/test-runner-core@npm:^0.10.20, @web/test-runner-core@npm:^0.10.29": version: 0.10.29 resolution: "@web/test-runner-core@npm:0.10.29" @@ -6119,6 +6970,40 @@ __metadata: languageName: node linkType: hard +"@web/test-runner-core@npm:^0.13.0": + version: 0.13.0 + resolution: "@web/test-runner-core@npm:0.13.0" + dependencies: + "@babel/code-frame": ^7.12.11 + "@types/babel__code-frame": ^7.0.2 + 
"@types/co-body": ^6.1.0 + "@types/convert-source-map": ^2.0.0 + "@types/debounce": ^1.2.0 + "@types/istanbul-lib-coverage": ^2.0.3 + "@types/istanbul-reports": ^3.0.0 + "@web/browser-logs": ^0.4.0 + "@web/dev-server-core": ^0.7.0 + chokidar: ^3.4.3 + cli-cursor: ^3.1.0 + co-body: ^6.1.0 + convert-source-map: ^2.0.0 + debounce: ^1.2.0 + dependency-graph: ^0.11.0 + globby: ^11.0.1 + ip: ^1.1.5 + istanbul-lib-coverage: ^3.0.0 + istanbul-lib-report: ^3.0.1 + istanbul-reports: ^3.0.2 + log-update: ^4.0.0 + nanocolors: ^0.2.1 + nanoid: ^3.1.25 + open: ^8.0.2 + picomatch: ^2.2.2 + source-map: ^0.7.3 + checksum: 6c2bd4962da51521dc0d2cf660bf4e1fa3a9a92166a8c71e85a9fe9160710bbe83658dfceae6108123dcf85bf41fbabd8d3b7294f350e2f732f3713fcaa9a024 + languageName: node + linkType: hard + "@web/test-runner-coverage-v8@npm:^0.5.0": version: 0.5.0 resolution: "@web/test-runner-coverage-v8@npm:0.5.0" @@ -6144,6 +7029,19 @@ __metadata: languageName: node linkType: hard +"@web/test-runner-coverage-v8@npm:^0.8.0": + version: 0.8.0 + resolution: "@web/test-runner-coverage-v8@npm:0.8.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + istanbul-lib-coverage: ^3.0.0 + lru-cache: ^8.0.4 + picomatch: ^2.2.2 + v8-to-istanbul: ^9.0.1 + checksum: 343f834372b3aeb2c24f4b03ce956d8ad851ef2a85b94507651c2a65321fcdff1b26a2c44d7516e97d9c42786bb003b9c245ad0798a414a814d0264fdbe0761e + languageName: node + linkType: hard + "@web/test-runner-mocha@npm:^0.7.5": version: 0.7.5 resolution: "@web/test-runner-mocha@npm:0.7.5" @@ -6154,14 +7052,34 @@ __metadata: languageName: node linkType: hard +"@web/test-runner-mocha@npm:^0.9.0": + version: 0.9.0 + resolution: "@web/test-runner-mocha@npm:0.9.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + checksum: bcc9410ac9d679e7bb804fc5720b2a0ed3b4d08f2b49c03f2157f5b54c7f525a432712e1da644f04e5190c2480af2dc46a4c736cdba3fda3ba5fa98fd0f01a94 + languageName: node + linkType: hard + "@web/test-runner-playwright@npm:^0.10.0": version: 0.10.3 resolution: 
"@web/test-runner-playwright@npm:0.10.3" dependencies: - "@web/test-runner-core": ^0.12.0 - "@web/test-runner-coverage-v8": ^0.7.3 + "@web/test-runner-core": ^0.12.0 + "@web/test-runner-coverage-v8": ^0.7.3 + playwright: ^1.22.2 + checksum: 7c765d34482f2e299742c3ffe80790229d0825569016ccfccbb1a0c915f89551a3cc14a1454ed7c6895aaa03605ea444f7c1846eeab82bf02702e87a60628b3c + languageName: node + linkType: hard + +"@web/test-runner-playwright@npm:^0.11.0": + version: 0.11.0 + resolution: "@web/test-runner-playwright@npm:0.11.0" + dependencies: + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-coverage-v8": ^0.8.0 playwright: ^1.22.2 - checksum: 7c765d34482f2e299742c3ffe80790229d0825569016ccfccbb1a0c915f89551a3cc14a1454ed7c6895aaa03605ea444f7c1846eeab82bf02702e87a60628b3c + checksum: 3618b0b559d865af1211b3d86ec57487b32722a0dd640c9a3faca6c692cceec75176d742d0db73c823d300fbe761e5c15b2fad0c5096c3dbfedab313ed1aa7fe languageName: node linkType: hard @@ -6192,6 +7110,33 @@ __metadata: languageName: node linkType: hard +"@web/test-runner@npm:^0.18.0": + version: 0.18.0 + resolution: "@web/test-runner@npm:0.18.0" + dependencies: + "@web/browser-logs": ^0.4.0 + "@web/config-loader": ^0.3.0 + "@web/dev-server": ^0.4.0 + "@web/test-runner-chrome": ^0.15.0 + "@web/test-runner-commands": ^0.9.0 + "@web/test-runner-core": ^0.13.0 + "@web/test-runner-mocha": ^0.9.0 + camelcase: ^6.2.0 + command-line-args: ^5.1.1 + command-line-usage: ^7.0.1 + convert-source-map: ^2.0.0 + diff: ^5.0.0 + globby: ^11.0.1 + nanocolors: ^0.2.1 + portfinder: ^1.0.32 + source-map: ^0.7.3 + bin: + web-test-runner: dist/bin.js + wtr: dist/bin.js + checksum: d5e410f08cb954f9854a3d837f5f704b578376ee8b0452cff66aeca2eb3cb98e50556ca3b958bda567b42af2ef2cd0a7424eaea40f9b3e80362ae788fbd33118 + languageName: node + linkType: hard + "@webassemblyjs/ast@npm:1.11.6, @webassemblyjs/ast@npm:^1.11.5": version: 1.11.6 resolution: "@webassemblyjs/ast@npm:1.11.6" @@ -6343,6 +7288,39 @@ __metadata: languageName: node linkType: 
hard +"@webpack-cli/configtest@npm:^1.2.0": + version: 1.2.0 + resolution: "@webpack-cli/configtest@npm:1.2.0" + peerDependencies: + webpack: 4.x.x || 5.x.x + webpack-cli: 4.x.x + checksum: a2726cd9ec601d2b57e5fc15e0ebf5200a8892065e735911269ac2038e62be4bfc176ea1f88c2c46ff09b4d05d4c10ae045e87b3679372483d47da625a327e28 + languageName: node + linkType: hard + +"@webpack-cli/info@npm:^1.5.0": + version: 1.5.0 + resolution: "@webpack-cli/info@npm:1.5.0" + dependencies: + envinfo: ^7.7.3 + peerDependencies: + webpack-cli: 4.x.x + checksum: 7f56fe037cd7d1fd5c7428588519fbf04a0cad33925ee4202ffbafd00f8ec1f2f67d991245e687d50e0f3e23f7b7814273d56cb9f7da4b05eed47c8d815c6296 + languageName: node + linkType: hard + +"@webpack-cli/serve@npm:^1.7.0": + version: 1.7.0 + resolution: "@webpack-cli/serve@npm:1.7.0" + peerDependencies: + webpack-cli: 4.x.x + peerDependenciesMeta: + webpack-dev-server: + optional: true + checksum: d475e8effa23eb7ff9a48b14d4de425989fd82f906ce71c210921cc3852327c22873be00c35e181a25a6bd03d424ae2b83e7f3b3f410ac7ee31b128ab4ac7713 + languageName: node + linkType: hard + "@xtuc/ieee754@npm:^1.2.0": version: 1.2.0 resolution: "@xtuc/ieee754@npm:1.2.0" @@ -6371,6 +7349,15 @@ __metadata: languageName: node linkType: hard +"abort-controller@npm:^3.0.0": + version: 3.0.0 + resolution: "abort-controller@npm:3.0.0" + dependencies: + event-target-shim: ^5.0.0 + checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75 + languageName: node + linkType: hard + "abstract-level@npm:^1.0.0, abstract-level@npm:^1.0.2, abstract-level@npm:^1.0.3": version: 1.0.3 resolution: "abstract-level@npm:1.0.3" @@ -6386,6 +7373,15 @@ __metadata: languageName: node linkType: hard +"abstract-leveldown@npm:~0.12.0, abstract-leveldown@npm:~0.12.1": + version: 0.12.4 + resolution: "abstract-leveldown@npm:0.12.4" + dependencies: + xtend: ~3.0.0 + checksum: 
e300f04bb638cc9c462f6e8fa925672e51beb24c1470c39ece709e54f2f499661ac5fe0119175c7dcb6e32c843423d6960009d4d24e72526478b261163e8070b + languageName: node + linkType: hard + "accepts@npm:^1.3.5, accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.8": version: 1.3.8 resolution: "accepts@npm:1.3.8" @@ -6444,6 +7440,13 @@ __metadata: languageName: node linkType: hard +"adm-zip@npm:^0.5.0": + version: 0.5.10 + resolution: "adm-zip@npm:0.5.10" + checksum: 07ed91cf6423bf5dca4ee63977bc7635e91b8d21829c00829d48dce4c6932e1b19e6cfcbe44f1931c956e68795ae97183fc775913883fa48ce88a1ac11fb2034 + languageName: node + linkType: hard + "aes-js@npm:3.0.0": version: 3.0.0 resolution: "aes-js@npm:3.0.0" @@ -6693,7 +7696,7 @@ __metadata: languageName: node linkType: hard -"arg@npm:5.0.2, arg@npm:^5.0.0": +"arg@npm:5.0.2, arg@npm:^5.0.0, arg@npm:^5.0.2": version: 5.0.2 resolution: "arg@npm:5.0.2" checksum: 6c69ada1a9943d332d9e5382393e897c500908d91d5cb735a01120d5f71daf1b339b7b8980cbeaba8fd1afc68e658a739746179e4315a26e8a28951ff9930078 @@ -6758,6 +7761,13 @@ __metadata: languageName: node linkType: hard +"array-timsort@npm:^1.0.3": + version: 1.0.3 + resolution: "array-timsort@npm:1.0.3" + checksum: fd4b5b0911214bdc8b5699ed10d309685551b518b3819c611c967cff59b87aee01cf591a10e36a3f14dbff696984bd6682b845f6fdbf1217195e910f241a4f78 + languageName: node + linkType: hard + "array-union@npm:^2.1.0": version: 2.1.0 resolution: "array-union@npm:2.1.0" @@ -6765,6 +7775,19 @@ __metadata: languageName: node linkType: hard +"assert@npm:^2.1.0": + version: 2.1.0 + resolution: "assert@npm:2.1.0" + dependencies: + call-bind: ^1.0.2 + is-nan: ^1.3.2 + object-is: ^1.1.5 + object.assign: ^4.1.4 + util: ^0.12.5 + checksum: 1ed1cabba9abe55f4109b3f7292b4e4f3cf2953aad8dc148c0b3c3bd676675c31b1abb32ef563b7d5a19d1715bf90d1e5f09fad2a4ee655199468902da80f7c2 + languageName: node + linkType: hard + "assertion-error@npm:^1.1.0": version: 1.1.0 resolution: "assertion-error@npm:1.1.0" @@ -6772,6 +7795,15 @@ __metadata: 
languageName: node linkType: hard +"ast-types@npm:^0.13.4": + version: 0.13.4 + resolution: "ast-types@npm:0.13.4" + dependencies: + tslib: ^2.0.1 + checksum: 5a51f7b70588ecced3601845a0e203279ca2f5fdc184416a0a1640c93ec0a267241d6090a328e78eebb8de81f8754754e0a4f1558ba2a3d638f8ccbd0b1f0eff + languageName: node + linkType: hard + "astral-regex@npm:^2.0.0": version: 2.0.0 resolution: "astral-regex@npm:2.0.0" @@ -6788,6 +7820,15 @@ __metadata: languageName: node linkType: hard +"async-mutex@npm:0.4.0": + version: 0.4.0 + resolution: "async-mutex@npm:0.4.0" + dependencies: + tslib: ^2.4.0 + checksum: 813a71728b35a4fbfd64dba719f04726d9133c67b577fcd951b7028c4a675a13ee34e69beb82d621f87bf81f5d4f135c4c44be0448550c7db728547244ef71fc + languageName: node + linkType: hard + "async@npm:^2.6.4": version: 2.6.4 resolution: "async@npm:2.6.4" @@ -6829,6 +7870,13 @@ __metadata: languageName: node linkType: hard +"available-typed-arrays@npm:^1.0.5": + version: 1.0.5 + resolution: "available-typed-arrays@npm:1.0.5" + checksum: 20eb47b3cefd7db027b9bbb993c658abd36d4edd3fe1060e83699a03ee275b0c9b216cc076ff3f2db29073225fb70e7613987af14269ac1fe2a19803ccc97f1a + languageName: node + linkType: hard + "axios@npm:^0.25.0": version: 0.25.0 resolution: "axios@npm:0.25.0" @@ -6849,6 +7897,13 @@ __metadata: languageName: node linkType: hard +"b4a@npm:^1.6.4": + version: 1.6.4 + resolution: "b4a@npm:1.6.4" + checksum: 81b086f9af1f8845fbef4476307236bda3d660c158c201db976f19cdce05f41f93110ab6b12fd7a2696602a490cc43d5410ee36a56d6eef93afb0d6ca69ac3b2 + languageName: node + linkType: hard + "babel-loader@npm:^8.2.5": version: 8.3.0 resolution: "babel-loader@npm:8.3.0" @@ -6980,6 +8035,13 @@ __metadata: languageName: node linkType: hard +"basic-ftp@npm:^5.0.2": + version: 5.0.4 + resolution: "basic-ftp@npm:5.0.4" + checksum: 57725f24debd8c1b36f9bad1bfee39c5d9f5997f32a23e5c957389dcc64373a13b41711e5723b4a3b616a93530b345686119f480c27a115b2fde944c1652ceb1 + languageName: node + linkType: hard + "batch@npm:0.6.1": 
version: 0.6.1 resolution: "batch@npm:0.6.1" @@ -7033,6 +8095,15 @@ __metadata: languageName: node linkType: hard +"bl@npm:~0.8.1": + version: 0.8.2 + resolution: "bl@npm:0.8.2" + dependencies: + readable-stream: ~1.0.26 + checksum: 18767c5c861ae1cdbb000bb346e9e8e29137225e8eef97f39db78beeb236beca609f465580c5c1b177d621505f57400834fb4a17a66d264f33a0237293ec2ac5 + languageName: node + linkType: hard + "blakejs@npm:^1.1.0": version: 1.2.1 resolution: "blakejs@npm:1.2.1" @@ -7234,6 +8305,17 @@ __metadata: languageName: node linkType: hard +"browserify-fs@npm:^1.0.0": + version: 1.0.0 + resolution: "browserify-fs@npm:1.0.0" + dependencies: + level-filesystem: ^1.0.1 + level-js: ^2.1.3 + levelup: ^0.18.2 + checksum: e0c35cf42c839c0a217048b1671d91ee6e53fd05f163db4f809e46c2f6264f784768e7c850abc200b0eaca378d42e00e01876eda21fd84fc0a4280bd6200a9c3 + languageName: node + linkType: hard + "browserslist@npm:^4.0.0, browserslist@npm:^4.14.5, browserslist@npm:^4.18.1, browserslist@npm:^4.21.10, browserslist@npm:^4.21.4, browserslist@npm:^4.21.9, browserslist@npm:^4.22.2": version: 4.22.2 resolution: "browserslist@npm:4.22.2" @@ -7406,7 +8488,7 @@ __metadata: languageName: node linkType: hard -"call-bind@npm:^1.0.0, call-bind@npm:^1.0.5": +"call-bind@npm:^1.0.0, call-bind@npm:^1.0.2, call-bind@npm:^1.0.4, call-bind@npm:^1.0.5": version: 1.0.5 resolution: "call-bind@npm:1.0.5" dependencies: @@ -7417,7 +8499,7 @@ __metadata: languageName: node linkType: hard -"callsites@npm:^3.0.0": +"callsites@npm:^3.0.0, callsites@npm:^3.1.0": version: 3.1.0 resolution: "callsites@npm:3.1.0" checksum: 072d17b6abb459c2ba96598918b55868af677154bec7e73d222ef95a8fdb9bbf7dae96a8421085cdad8cd190d86653b5b6dc55a4484f2e5b2e27d5e0c3fc15b3 @@ -7513,7 +8595,7 @@ __metadata: languageName: node linkType: hard -"chai@npm:^4.3.7, chai@npm:^4.3.8": +"chai@npm:^4.3.10, chai@npm:^4.3.7, chai@npm:^4.3.8": version: 4.3.10 resolution: "chai@npm:4.3.10" dependencies: @@ -7537,6 +8619,15 @@ __metadata: languageName: node 
linkType: hard +"chalk-template@npm:^1.1.0": + version: 1.1.0 + resolution: "chalk-template@npm:1.1.0" + dependencies: + chalk: ^5.2.0 + checksum: 868aae8d4e7556ad2f35de4e04fe65dbe1ea6c5c80ad783f1c156d0a5c33f444c6814f49cbb68fe348c78e99daf2bcf566b47ad7e13603e4691ca78b2f422824 + languageName: node + linkType: hard + "chalk@npm:5.0.1": version: 5.0.1 resolution: "chalk@npm:5.0.1" @@ -7544,7 +8635,7 @@ __metadata: languageName: node linkType: hard -"chalk@npm:^2.4.2": +"chalk@npm:^2.4.1, chalk@npm:^2.4.2": version: 2.4.2 resolution: "chalk@npm:2.4.2" dependencies: @@ -7565,7 +8656,7 @@ __metadata: languageName: node linkType: hard -"chalk@npm:^5.0.1, chalk@npm:^5.2.0": +"chalk@npm:^5.0.1, chalk@npm:^5.2.0, chalk@npm:^5.3.0": version: 5.3.0 resolution: "chalk@npm:5.3.0" checksum: 623922e077b7d1e9dedaea6f8b9e9352921f8ae3afe739132e0e00c275971bdd331268183b2628cf4ab1727c45ea1f28d7e24ac23ce1db1eb653c414ca8a5a80 @@ -7720,6 +8811,17 @@ __metadata: languageName: node linkType: hard +"chromium-bidi@npm:0.4.16": + version: 0.4.16 + resolution: "chromium-bidi@npm:0.4.16" + dependencies: + mitt: 3.0.0 + peerDependencies: + devtools-protocol: "*" + checksum: 9cbb362fdf589dbdfd1618499c5bbdac45a3aa1291c1d2faa2f1ea3768738677985175d1bb1511dfe3e188bc78e6ea2acb453564ece7e09f535bbcd2253ce06a + languageName: node + linkType: hard + "chromium-bidi@npm:0.4.7": version: 0.4.7 resolution: "chromium-bidi@npm:0.4.7" @@ -7785,6 +8887,16 @@ __metadata: languageName: node linkType: hard +"clear-module@npm:^4.1.2": + version: 4.1.2 + resolution: "clear-module@npm:4.1.2" + dependencies: + parent-module: ^2.0.0 + resolve-from: ^5.0.0 + checksum: 4931f0c461f5d7b9b79f62c2d1bc31c37f7f1d33b4e95eef7080a83955c0374f4c180f5a96cc4d63bbefc64a9aa5d12b155641109e8e489dfa50fd5820e5101f + languageName: node + linkType: hard + "cli-boxes@npm:^2.2.1": version: 2.2.1 resolution: "cli-boxes@npm:2.2.1" @@ -7881,6 +8993,13 @@ __metadata: languageName: node linkType: hard +"clone@npm:~0.1.9": + version: 0.1.19 + resolution: 
"clone@npm:0.1.19" + checksum: 5e710e16da67abe30c0664c8fd69c280635be59a4fae0a5fe58ed324e701e99348b48ce67288716fa223edd42ba574e58a3783cb2fcfa381b8b49ce7e56ac3f4 + languageName: node + linkType: hard + "clsx@npm:^1.1.1, clsx@npm:^1.2.1": version: 1.2.1 resolution: "clsx@npm:1.2.1" @@ -7967,7 +9086,7 @@ __metadata: languageName: node linkType: hard -"colorette@npm:^2.0.10": +"colorette@npm:^2.0.10, colorette@npm:^2.0.14": version: 2.0.20 resolution: "colorette@npm:2.0.20" checksum: 0c016fea2b91b733eb9f4bcdb580018f52c0bc0979443dad930e5037a968237ac53d9beb98e218d2e9235834f8eebce7f8e080422d6194e957454255bde71d3d @@ -8011,7 +9130,7 @@ __metadata: languageName: node linkType: hard -"command-exists@npm:^1.2.8": +"command-exists@npm:^1.2.7, command-exists@npm:^1.2.8": version: 1.2.9 resolution: "command-exists@npm:1.2.9" checksum: 729ae3d88a2058c93c58840f30341b7f82688a573019535d198b57a4d8cb0135ced0ad7f52b591e5b28a90feb2c675080ce916e56254a0f7c15cb2395277cac3 @@ -8068,6 +9187,13 @@ __metadata: languageName: node linkType: hard +"commander@npm:^11.1.0": + version: 11.1.0 + resolution: "commander@npm:11.1.0" + checksum: fd1a8557c6b5b622c89ecdfde703242ab7db3b628ea5d1755784c79b8e7cb0d74d65b4a262289b533359cd58e1bfc0bf50245dfbcd2954682a6f367c828b79ef + languageName: node + linkType: hard + "commander@npm:^2.20.0": version: 2.20.3 resolution: "commander@npm:2.20.3" @@ -8082,7 +9208,7 @@ __metadata: languageName: node linkType: hard -"commander@npm:^7.2.0": +"commander@npm:^7.0.0, commander@npm:^7.2.0": version: 7.2.0 resolution: "commander@npm:7.2.0" checksum: 53501cbeee61d5157546c0bef0fedb6cdfc763a882136284bed9a07225f09a14b82d2a84e7637edfd1a679fb35ed9502fd58ef1d091e6287f60d790147f68ddc @@ -8096,6 +9222,19 @@ __metadata: languageName: node linkType: hard +"comment-json@npm:^4.2.3": + version: 4.2.3 + resolution: "comment-json@npm:4.2.3" + dependencies: + array-timsort: ^1.0.3 + core-util-is: ^1.0.3 + esprima: ^4.0.1 + has-own-prop: ^2.0.0 + repeat-string: ^1.6.1 + checksum: 
7f8d26266b0d49de9661f6365cbcc373fee4f4d0f422a203dfb17ad8f3d84c5be5ded444874935a197cd03cff297c53fe48910256cb4171cb2e52a3e6b9d317c + languageName: node + linkType: hard + "common-path-prefix@npm:^3.0.0": version: 3.0.0 resolution: "common-path-prefix@npm:3.0.0" @@ -8141,6 +9280,18 @@ __metadata: languageName: node linkType: hard +"concat-stream@npm:^1.4.4": + version: 1.6.2 + resolution: "concat-stream@npm:1.6.2" + dependencies: + buffer-from: ^1.0.0 + inherits: ^2.0.3 + readable-stream: ^2.2.2 + typedarray: ^0.0.6 + checksum: 1ef77032cb4459dcd5187bd710d6fc962b067b64ec6a505810de3d2b8cc0605638551b42f8ec91edf6fcd26141b32ef19ad749239b58fae3aba99187adc32285 + languageName: node + linkType: hard + "config-chain@npm:^1.1.11": version: 1.1.13 resolution: "config-chain@npm:1.1.13" @@ -8306,7 +9457,7 @@ __metadata: languageName: node linkType: hard -"core-util-is@npm:~1.0.0": +"core-util-is@npm:^1.0.3, core-util-is@npm:~1.0.0": version: 1.0.3 resolution: "core-util-is@npm:1.0.3" checksum: 9de8597363a8e9b9952491ebe18167e3b36e7707569eed0ebf14f8bba773611376466ae34575bca8cfe3c767890c859c74056084738f09d4e4a6f902b2ad7d99 @@ -8408,6 +9559,15 @@ __metadata: languageName: node linkType: hard +"cross-fetch@npm:4.0.0": + version: 4.0.0 + resolution: "cross-fetch@npm:4.0.0" + dependencies: + node-fetch: ^2.6.12 + checksum: ecca4f37ffa0e8283e7a8a590926b66713a7ef7892757aa36c2d20ffa27b0ac5c60dcf453119c809abe5923fc0bae3702a4d896bfb406ef1077b0d0018213e24 + languageName: node + linkType: hard + "cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3": version: 7.0.3 resolution: "cross-spawn@npm:7.0.3" @@ -8435,6 +9595,141 @@ __metadata: languageName: node linkType: hard +"cspell-config-lib@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-config-lib@npm:8.3.2" + dependencies: + "@cspell/cspell-types": 8.3.2 + comment-json: ^4.2.3 + yaml: ^2.3.4 + checksum: 
7d5563c2a49f13e9e2b965258f8d5539e282c75234f4375831db3f7ee3c209aa246f07f8bbbc7e7195c248edfdc3a58cb590c5119ad866446ee47df07a593481 + languageName: node + linkType: hard + +"cspell-dictionary@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-dictionary@npm:8.3.2" + dependencies: + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + cspell-trie-lib: 8.3.2 + fast-equals: ^5.0.1 + gensequence: ^6.0.0 + checksum: 374e8cc94b7d46230b8a3ddb066c14bee746963c01b13a60bf43cfb2e6da1a32dcf963c573016aee3fdf8ffbda6253850679d4c93257de2bf4ec45d4cbaf5888 + languageName: node + linkType: hard + +"cspell-gitignore@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-gitignore@npm:8.3.2" + dependencies: + cspell-glob: 8.3.2 + find-up-simple: ^1.0.0 + bin: + cspell-gitignore: bin.mjs + checksum: 0ae847391993078bca601214812aa9476311d501fcc3a20ceb6e4a22825335188354f71328b0ebc45d6ed35756c6d29062cb4e4b2c26a28a9fb57d3573113b01 + languageName: node + linkType: hard + +"cspell-glob@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-glob@npm:8.3.2" + dependencies: + micromatch: ^4.0.5 + checksum: c1abb55fdfde46014963ccd73451218bec7cd9bc0f3cb9308efa0cf066c7783eb69749400aa0d0c1507fbac3fe598ccc26eb5932985db01264ca933f7564a5b2 + languageName: node + linkType: hard + +"cspell-grammar@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-grammar@npm:8.3.2" + dependencies: + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + bin: + cspell-grammar: bin.mjs + checksum: 3f48e267f6c26c98f7f24159460bad1b341e0560f00a2e62761ad49ba3fcc8e953af3e4a712e598b2525242cb3dddfb339d85e9917fd72aca33ab292cea71bc9 + languageName: node + linkType: hard + +"cspell-io@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-io@npm:8.3.2" + dependencies: + "@cspell/cspell-service-bus": 8.3.2 + checksum: 609f544d6c23dd471548cbc531ad5d704db4ffaa0513611cdc5f731e9f4de24057dcbc4c1d11e94df3725129549fe2629c9e73750f7de4d3a61eae2cbef13ff1 + languageName: node + linkType: hard + +"cspell-lib@npm:8.3.2": + version: 8.3.2 + 
resolution: "cspell-lib@npm:8.3.2" + dependencies: + "@cspell/cspell-bundled-dicts": 8.3.2 + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-resolver": 8.3.2 + "@cspell/cspell-types": 8.3.2 + "@cspell/dynamic-import": 8.3.2 + "@cspell/strong-weak-map": 8.3.2 + clear-module: ^4.1.2 + comment-json: ^4.2.3 + configstore: ^6.0.0 + cspell-config-lib: 8.3.2 + cspell-dictionary: 8.3.2 + cspell-glob: 8.3.2 + cspell-grammar: 8.3.2 + cspell-io: 8.3.2 + cspell-trie-lib: 8.3.2 + fast-equals: ^5.0.1 + gensequence: ^6.0.0 + import-fresh: ^3.3.0 + resolve-from: ^5.0.0 + vscode-languageserver-textdocument: ^1.0.11 + vscode-uri: ^3.0.8 + checksum: 398f8b799aa4b7e60b9a0b2e751fbd0d3d81f8ccdc0b331668568ab5f4cced0e64ddc8325a7901ae950fd3838f8fb37bb4dd8f73938fb78b50bdd5f2f5668375 + languageName: node + linkType: hard + +"cspell-trie-lib@npm:8.3.2": + version: 8.3.2 + resolution: "cspell-trie-lib@npm:8.3.2" + dependencies: + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + gensequence: ^6.0.0 + checksum: 43d3bcf4c1bf1d8fce9601beec52a4f5bd2e27ccd1ea737aca2c32dbec721942cb8671b6add6b57d3a5953389a494a28878a53abccc49de7c59bbb7e78413c3e + languageName: node + linkType: hard + +"cspell@npm:^8.3.2": + version: 8.3.2 + resolution: "cspell@npm:8.3.2" + dependencies: + "@cspell/cspell-json-reporter": 8.3.2 + "@cspell/cspell-pipe": 8.3.2 + "@cspell/cspell-types": 8.3.2 + "@cspell/dynamic-import": 8.3.2 + chalk: ^5.3.0 + chalk-template: ^1.1.0 + commander: ^11.1.0 + cspell-gitignore: 8.3.2 + cspell-glob: 8.3.2 + cspell-io: 8.3.2 + cspell-lib: 8.3.2 + fast-glob: ^3.3.2 + fast-json-stable-stringify: ^2.1.0 + file-entry-cache: ^8.0.0 + get-stdin: ^9.0.0 + semver: ^7.5.4 + strip-ansi: ^7.1.0 + vscode-uri: ^3.0.8 + bin: + cspell: bin.mjs + cspell-esm: bin.mjs + checksum: c798482b411ba94d8dc475445082d56b70fc23d88752f3e6e8e18174f247d35a0c50ed1a1a9f578cdfb1294dc8d175613f1aa7828f92b1f841047580156ab986 + languageName: node + linkType: hard + "css-declaration-sorter@npm:^6.3.1": version: 6.4.1 
resolution: "css-declaration-sorter@npm:6.4.1" @@ -8636,10 +9931,10 @@ __metadata: languageName: node linkType: hard -"data-uri-to-buffer@npm:^4.0.0": - version: 4.0.1 - resolution: "data-uri-to-buffer@npm:4.0.1" - checksum: 0d0790b67ffec5302f204c2ccca4494f70b4e2d940fea3d36b09f0bb2b8539c2e86690429eb1f1dc4bcc9e4df0644193073e63d9ee48ac9fce79ec1506e4aa4c +"data-uri-to-buffer@npm:^6.0.0": + version: 6.0.1 + resolution: "data-uri-to-buffer@npm:6.0.1" + checksum: 9140e68c585ae33d950f5943bd476751346c8b789ae80b01a578a33cb8f7f706d1ca7378aff2b1878b2a6d9a8c88c55cc286d88191c8b8ead8255c3c4d934530 languageName: node linkType: hard @@ -8796,6 +10091,15 @@ __metadata: languageName: node linkType: hard +"deferred-leveldown@npm:~0.2.0": + version: 0.2.0 + resolution: "deferred-leveldown@npm:0.2.0" + dependencies: + abstract-leveldown: ~0.12.1 + checksum: f7690ec5b1e951e6f56998be26dd0a1331ef28cb7eaa9e090a282780d47dc006effd4b82a2a82b636cae801378047997aca10c0b44b09c8624633cdb96b07913 + languageName: node + linkType: hard + "define-data-property@npm:^1.0.1, define-data-property@npm:^1.1.1": version: 1.1.1 resolution: "define-data-property@npm:1.1.1" @@ -8821,7 +10125,7 @@ __metadata: languageName: node linkType: hard -"define-properties@npm:^1.2.1": +"define-properties@npm:^1.1.3, define-properties@npm:^1.2.1": version: 1.2.1 resolution: "define-properties@npm:1.2.1" dependencies: @@ -8832,6 +10136,17 @@ __metadata: languageName: node linkType: hard +"degenerator@npm:^5.0.0": + version: 5.0.1 + resolution: "degenerator@npm:5.0.1" + dependencies: + ast-types: ^0.13.4 + escodegen: ^2.1.0 + esprima: ^4.0.1 + checksum: a64fa39cdf6c2edd75188157d32338ee9de7193d7dbb2aeb4acb1eb30fa4a15ed80ba8dae9bd4d7b085472cf174a5baf81adb761aaa8e326771392c922084152 + languageName: node + linkType: hard + "del@npm:^6.1.1": version: 6.1.1 resolution: "del@npm:6.1.1" @@ -8955,6 +10270,13 @@ __metadata: languageName: node linkType: hard +"devtools-protocol@npm:0.0.1147663": + version: 0.0.1147663 + resolution: 
"devtools-protocol@npm:0.0.1147663" + checksum: 0631f2b6c6cd7f56e7d62a85bfc291f7e167f0f2de90969ef61fb24e2bd546b2e9530043d2bc3fe6c4d7a9e00473004272d2c2832a10a05e4b75c03a22f549fc + languageName: node + linkType: hard + "diff@npm:5.0.0": version: 5.0.0 resolution: "diff@npm:5.0.0" @@ -8969,7 +10291,7 @@ __metadata: languageName: node linkType: hard -"diff@npm:^5.0.0": +"diff@npm:^5.0.0, diff@npm:^5.1.0": version: 5.1.0 resolution: "diff@npm:5.1.0" checksum: c7bf0df7c9bfbe1cf8a678fd1b2137c4fb11be117a67bc18a0e03ae75105e8533dbfb1cda6b46beb3586ef5aed22143ef9d70713977d5fb1f9114e21455fba90 @@ -9015,7 +10337,7 @@ __metadata: "@noir-lang/noir_js": "workspace:*" "@noir-lang/noirc_abi": "workspace:*" "@noir-lang/types": "workspace:*" - "@signorecello/noir_playground": ^0.6.0 + "@signorecello/noir_playground": ^0.7.0 "@types/prettier": ^3 axios: ^1.4.0 clsx: ^1.2.1 @@ -9172,7 +10494,7 @@ __metadata: languageName: node linkType: hard -"duplexer@npm:^0.1.2, duplexer@npm:~0.1.1": +"duplexer@npm:^0.1.2": version: 0.1.2 resolution: "duplexer@npm:0.1.2" checksum: 62ba61a830c56801db28ff6305c7d289b6dc9f859054e8c982abd8ee0b0a14d2e9a8e7d086ffee12e868d43e2bbe8a964be55ddbd8c8957714c87373c7a4f9b0 @@ -9282,7 +10604,7 @@ __metadata: languageName: node linkType: hard -"enhanced-resolve@npm:^5.15.0": +"enhanced-resolve@npm:^5.0.0, enhanced-resolve@npm:^5.15.0": version: 5.15.0 resolution: "enhanced-resolve@npm:5.15.0" dependencies: @@ -9323,6 +10645,15 @@ __metadata: languageName: node linkType: hard +"envinfo@npm:^7.7.3": + version: 7.11.0 + resolution: "envinfo@npm:7.11.0" + bin: + envinfo: dist/cli.js + checksum: c45a7d20409d5f4cda72483b150d3816b15b434f2944d72c1495d8838bd7c4e7b2f32c12128ffb9b92b5f66f436237b8a525eb3a9a5da2d20013bc4effa28aef + languageName: node + linkType: hard + "err-code@npm:^2.0.2": version: 2.0.3 resolution: "err-code@npm:2.0.3" @@ -9330,6 +10661,17 @@ __metadata: languageName: node linkType: hard +"errno@npm:^0.1.1, errno@npm:~0.1.1": + version: 0.1.8 + resolution: 
"errno@npm:0.1.8" + dependencies: + prr: ~1.0.1 + bin: + errno: cli.js + checksum: 1271f7b9fbb3bcbec76ffde932485d1e3561856d21d847ec613a9722ee924cdd4e523a62dc71a44174d91e898fe21fdc8d5b50823f4b5e0ce8c35c8271e6ef4a + languageName: node + linkType: hard + "error-ex@npm:^1.3.1": version: 1.3.2 resolution: "error-ex@npm:1.3.2" @@ -9556,6 +10898,24 @@ __metadata: languageName: node linkType: hard +"escodegen@npm:^2.1.0": + version: 2.1.0 + resolution: "escodegen@npm:2.1.0" + dependencies: + esprima: ^4.0.1 + estraverse: ^5.2.0 + esutils: ^2.0.2 + source-map: ~0.6.1 + dependenciesMeta: + source-map: + optional: true + bin: + escodegen: bin/escodegen.js + esgenerate: bin/esgenerate.js + checksum: 096696407e161305cd05aebb95134ad176708bc5cb13d0dcc89a5fcbb959b8ed757e7f2591a5f8036f8f4952d4a724de0df14cd419e29212729fa6df5ce16bf6 + languageName: node + linkType: hard + "eslint-plugin-prettier@npm:^5.0.0": version: 5.0.1 resolution: "eslint-plugin-prettier@npm:5.0.1" @@ -9661,7 +11021,7 @@ __metadata: languageName: node linkType: hard -"esprima@npm:^4.0.0": +"esprima@npm:^4.0.0, esprima@npm:^4.0.1": version: 4.0.1 resolution: "esprima@npm:4.0.1" bin: @@ -9769,6 +11129,13 @@ __metadata: languageName: node linkType: hard +"estree-walker@npm:^2.0.2": + version: 2.0.2 + resolution: "estree-walker@npm:2.0.2" + checksum: 6151e6f9828abe2259e57f5fd3761335bb0d2ebd76dc1a01048ccee22fabcfef3c0859300f6d83ff0d1927849368775ec5a6d265dde2f6de5a1be1721cd94efc + languageName: node + linkType: hard + "estree-walker@npm:^3.0.0": version: 3.0.3 resolution: "estree-walker@npm:3.0.3" @@ -9932,18 +11299,10 @@ __metadata: languageName: node linkType: hard -"event-stream@npm:=3.3.4": - version: 3.3.4 - resolution: "event-stream@npm:3.3.4" - dependencies: - duplexer: ~0.1.1 - from: ~0 - map-stream: ~0.1.0 - pause-stream: 0.0.11 - split: 0.3 - stream-combiner: ~0.0.4 - through: ~2.3.1 - checksum: 
80b467820b6daf824d9fb4345d2daf115a056e5c104463f2e98534e92d196a27f2df5ea2aa085624db26f4c45698905499e881d13bc7c01f7a13eac85be72a22 +"event-target-shim@npm:^5.0.0": + version: 5.0.1 + resolution: "event-target-shim@npm:5.0.1" + checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166 languageName: node linkType: hard @@ -9954,7 +11313,7 @@ __metadata: languageName: node linkType: hard -"events@npm:^3.2.0": +"events@npm:^3.2.0, events@npm:^3.3.0": version: 3.3.0 resolution: "events@npm:3.3.0" checksum: f6f487ad2198aa41d878fa31452f1a3c00958f46e9019286ff4787c84aac329332ab45c9cdc8c445928fc6d7ded294b9e005a7fce9426488518017831b272780 @@ -10099,7 +11458,21 @@ __metadata: languageName: node linkType: hard -"fast-glob@npm:^3.2.11, fast-glob@npm:^3.2.12, fast-glob@npm:^3.2.9, fast-glob@npm:^3.3.0": +"fast-equals@npm:^5.0.1": + version: 5.0.1 + resolution: "fast-equals@npm:5.0.1" + checksum: fbb3b6a74f3a0fa930afac151ff7d01639159b4fddd2678b5d50708e0ba38e9ec14602222d10dadb8398187342692c04fbef5a62b1cfcc7942fe03e754e064bc + languageName: node + linkType: hard + +"fast-fifo@npm:^1.1.0, fast-fifo@npm:^1.2.0": + version: 1.3.2 + resolution: "fast-fifo@npm:1.3.2" + checksum: 6bfcba3e4df5af7be3332703b69a7898a8ed7020837ec4395bb341bd96cc3a6d86c3f6071dd98da289618cf2234c70d84b2a6f09a33dd6f988b1ff60d8e54275 + languageName: node + linkType: hard + +"fast-glob@npm:^3.2.11, fast-glob@npm:^3.2.12, fast-glob@npm:^3.2.9, fast-glob@npm:^3.3.0, fast-glob@npm:^3.3.2": version: 3.3.2 resolution: "fast-glob@npm:3.3.2" dependencies: @@ -10112,7 +11485,7 @@ __metadata: languageName: node linkType: hard -"fast-json-stable-stringify@npm:^2.0.0": +"fast-json-stable-stringify@npm:^2.0.0, fast-json-stable-stringify@npm:^2.1.0": version: 2.1.0 resolution: "fast-json-stable-stringify@npm:2.1.0" checksum: b191531e36c607977e5b1c47811158733c34ccb3bfde92c44798929e9b4154884378536d26ad90dfecd32e1ffc09c545d23535ad91b3161a27ddbb8ebe0cbecb 
@@ -10142,6 +11515,13 @@ __metadata: languageName: node linkType: hard +"fastest-levenshtein@npm:^1.0.12": + version: 1.0.16 + resolution: "fastest-levenshtein@npm:1.0.16" + checksum: a78d44285c9e2ae2c25f3ef0f8a73f332c1247b7ea7fb4a191e6bb51aa6ee1ef0dfb3ed113616dcdc7023e18e35a8db41f61c8d88988e877cf510df8edafbc71 + languageName: node + linkType: hard + "fastq@npm:^1.6.0": version: 1.15.0 resolution: "fastq@npm:1.15.0" @@ -10187,16 +11567,6 @@ __metadata: languageName: node linkType: hard -"fetch-blob@npm:^3.1.2, fetch-blob@npm:^3.1.4": - version: 3.2.0 - resolution: "fetch-blob@npm:3.2.0" - dependencies: - node-domexception: ^1.0.0 - web-streams-polyfill: ^3.0.3 - checksum: f19bc28a2a0b9626e69fd7cf3a05798706db7f6c7548da657cbf5026a570945f5eeaedff52007ea35c8bcd3d237c58a20bf1543bc568ab2422411d762dd3d5bf - languageName: node - linkType: hard - "fflate@npm:^0.8.0, fflate@npm:^0.8.1": version: 0.8.1 resolution: "fflate@npm:0.8.1" @@ -10213,6 +11583,15 @@ __metadata: languageName: node linkType: hard +"file-entry-cache@npm:^8.0.0": + version: 8.0.0 + resolution: "file-entry-cache@npm:8.0.0" + dependencies: + flat-cache: ^4.0.0 + checksum: f67802d3334809048c69b3d458f672e1b6d26daefda701761c81f203b80149c35dea04d78ea4238969dd617678e530876722a0634c43031a0957f10cc3ed190f + languageName: node + linkType: hard + "file-loader@npm:^6.2.0": version: 6.2.0 resolution: "file-loader@npm:6.2.0" @@ -10286,6 +11665,13 @@ __metadata: languageName: node linkType: hard +"find-up-simple@npm:^1.0.0": + version: 1.0.0 + resolution: "find-up-simple@npm:1.0.0" + checksum: 91c3d51c1111b5eb4e6e6d71d21438f6571a37a69dc288d4222b98996756e2f472fa5393a4dddb5e1a84929405d87e86f4bdce798ba84ee513b79854960ec140 + languageName: node + linkType: hard + "find-up@npm:5.0.0, find-up@npm:^5.0.0": version: 5.0.0 resolution: "find-up@npm:5.0.0" @@ -10345,6 +11731,17 @@ __metadata: languageName: node linkType: hard +"flat-cache@npm:^4.0.0": + version: 4.0.0 + resolution: "flat-cache@npm:4.0.0" + dependencies: + flatted: 
^3.2.9 + keyv: ^4.5.4 + rimraf: ^5.0.5 + checksum: 744d5f111aeecdfb963faab7089230c737a90c325137251b4fe144fd76932e19738a861e356c5ee828bb310592b42a1da667912d74d0403f1f4ef75be8bfdbac + languageName: node + linkType: hard + "flat@npm:^5.0.2": version: 5.0.2 resolution: "flat@npm:5.0.2" @@ -10371,6 +11768,22 @@ __metadata: languageName: node linkType: hard +"for-each@npm:^0.3.3": + version: 0.3.3 + resolution: "for-each@npm:0.3.3" + dependencies: + is-callable: ^1.1.3 + checksum: 6c48ff2bc63362319c65e2edca4a8e1e3483a2fabc72fbe7feaf8c73db94fc7861bd53bc02c8a66a0c1dd709da6b04eec42e0abdd6b40ce47305ae92a25e5d28 + languageName: node + linkType: hard + +"foreach@npm:~2.0.1": + version: 2.0.6 + resolution: "foreach@npm:2.0.6" + checksum: f7b68494545ee41cbd0b0425ebf5386c265dc38ef2a9b0d5cd91a1b82172e939b4cf9387f8e0ebf6db4e368fc79ed323f2198424d5c774515ac3ed9b08901c0e + languageName: node + linkType: hard + "foreground-child@npm:^3.1.0": version: 3.1.1 resolution: "foreground-child@npm:3.1.1" @@ -10437,15 +11850,6 @@ __metadata: languageName: node linkType: hard -"formdata-polyfill@npm:^4.0.10": - version: 4.0.10 - resolution: "formdata-polyfill@npm:4.0.10" - dependencies: - fetch-blob: ^3.1.2 - checksum: 82a34df292afadd82b43d4a740ce387bc08541e0a534358425193017bf9fb3567875dc5f69564984b1da979979b70703aa73dee715a17b6c229752ae736dd9db - languageName: node - linkType: hard - "forwarded@npm:0.2.0": version: 0.2.0 resolution: "forwarded@npm:0.2.0" @@ -10481,13 +11885,6 @@ __metadata: languageName: node linkType: hard -"from@npm:~0": - version: 0.1.7 - resolution: "from@npm:0.1.7" - checksum: b85125b7890489656eb2e4f208f7654a93ec26e3aefaf3bbbcc0d496fc1941e4405834fcc9fe7333192aa2187905510ace70417bbf9ac6f6f4784a731d986939 - languageName: node - linkType: hard - "fs-constants@npm:^1.0.0": version: 1.0.0 resolution: "fs-constants@npm:1.0.0" @@ -10541,6 +11938,17 @@ __metadata: languageName: node linkType: hard +"fs-extra@npm:^8.1.0": + version: 8.1.0 + resolution: "fs-extra@npm:8.1.0" + 
dependencies: + graceful-fs: ^4.2.0 + jsonfile: ^4.0.0 + universalify: ^0.1.0 + checksum: bf44f0e6cea59d5ce071bba4c43ca76d216f89e402dc6285c128abc0902e9b8525135aa808adad72c9d5d218e9f4bcc63962815529ff2f684ad532172a284880 + languageName: node + linkType: hard + "fs-extra@npm:^9.0.0": version: 9.1.0 resolution: "fs-extra@npm:9.1.0" @@ -10637,6 +12045,22 @@ __metadata: languageName: node linkType: hard +"fwd-stream@npm:^1.0.4": + version: 1.0.4 + resolution: "fwd-stream@npm:1.0.4" + dependencies: + readable-stream: ~1.0.26-4 + checksum: db4dcf68f214b3fabd6cd9658630dfd1d7ed8d43f7f45408027a90220cd75276e782d1e958821775d7a3a4a83034778e75a097bdc7002c758e8896f76213c65d + languageName: node + linkType: hard + +"gensequence@npm:^6.0.0": + version: 6.0.0 + resolution: "gensequence@npm:6.0.0" + checksum: 7c23404b227647cb033a2c6cf5a5266442409e9dcc6bc140565d28de64adfa03dc474119ae14d776dc177fe63d605d2f4b228fa3bf926145dddd68c8df95d029 + languageName: node + linkType: hard + "gensync@npm:^1.0.0-beta.1, gensync@npm:^1.0.0-beta.2": version: 1.0.0-beta.2 resolution: "gensync@npm:1.0.0-beta.2" @@ -10684,6 +12108,13 @@ __metadata: languageName: node linkType: hard +"get-stdin@npm:^9.0.0": + version: 9.0.0 + resolution: "get-stdin@npm:9.0.0" + checksum: 5972bc34d05932b45512c8e2d67b040f1c1ca8afb95c56cbc480985f2d761b7e37fe90dc8abd22527f062cc5639a6930ff346e9952ae4c11a2d4275869459594 + languageName: node + linkType: hard + "get-stream@npm:^4.1.0": version: 4.1.0 resolution: "get-stream@npm:4.1.0" @@ -10718,6 +12149,18 @@ __metadata: languageName: node linkType: hard +"get-uri@npm:^6.0.1": + version: 6.0.2 + resolution: "get-uri@npm:6.0.2" + dependencies: + basic-ftp: ^5.0.2 + data-uri-to-buffer: ^6.0.0 + debug: ^4.3.4 + fs-extra: ^8.1.0 + checksum: 762de3b0e3d4e7afc966e4ce93be587d70c270590da9b4c8fbff888362656c055838d926903d1774cbfeed4d392b4d6def4b2c06d48c050580070426a3a8629b + languageName: node + linkType: hard + "github-slugger@npm:^1.4.0, github-slugger@npm:^1.5.0": version: 1.5.0 
resolution: "github-slugger@npm:1.5.0" @@ -10764,7 +12207,7 @@ __metadata: languageName: node linkType: hard -"glob@npm:^10.2.2, glob@npm:^10.3.10": +"glob@npm:^10.2.2, glob@npm:^10.3.10, glob@npm:^10.3.7": version: 10.3.10 resolution: "glob@npm:10.3.10" dependencies: @@ -10793,6 +12236,15 @@ __metadata: languageName: node linkType: hard +"global-directory@npm:^4.0.1": + version: 4.0.1 + resolution: "global-directory@npm:4.0.1" + dependencies: + ini: 4.1.1 + checksum: 5b4df24438a4e5f21e43fbdd9e54f5e12bb48dce01a0a83b415d8052ce91be2d3a97e0c8f98a535e69649b2190036155e9f0f7d3c62f9318f31bdc3fd4f235f5 + languageName: node + linkType: hard + "global-dirs@npm:^3.0.0": version: 3.0.1 resolution: "global-dirs@npm:3.0.1" @@ -10852,7 +12304,7 @@ __metadata: languageName: node linkType: hard -"globby@npm:^13.1.1, globby@npm:^13.1.2": +"globby@npm:^13.1.1": version: 13.2.2 resolution: "globby@npm:13.2.2" dependencies: @@ -11041,6 +12493,13 @@ __metadata: languageName: node linkType: hard +"has-own-prop@npm:^2.0.0": + version: 2.0.0 + resolution: "has-own-prop@npm:2.0.0" + checksum: ca6336e85ead2295c9603880cbc199e2d3ff7eaea0e9035d68fbc79892e9cf681abc62c0909520f112c671dad9961be2173b21dff951358cc98425c560e789e0 + languageName: node + linkType: hard + "has-property-descriptors@npm:^1.0.0": version: 1.0.1 resolution: "has-property-descriptors@npm:1.0.1" @@ -11516,7 +12975,7 @@ __metadata: languageName: node linkType: hard -"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3": +"html-webpack-plugin@npm:^5.5.0, html-webpack-plugin@npm:^5.5.3, html-webpack-plugin@npm:^5.5.4": version: 5.5.4 resolution: "html-webpack-plugin@npm:5.5.4" dependencies: @@ -11683,7 +13142,7 @@ __metadata: languageName: node linkType: hard -"https-proxy-agent@npm:^7.0.1": +"https-proxy-agent@npm:^7.0.0, https-proxy-agent@npm:^7.0.1, https-proxy-agent@npm:^7.0.2": version: 7.0.2 resolution: "https-proxy-agent@npm:7.0.2" dependencies: @@ -11707,6 +13166,13 @@ __metadata: languageName: node linkType: 
hard +"hyperdyperid@npm:^1.2.0": + version: 1.2.0 + resolution: "hyperdyperid@npm:1.2.0" + checksum: 210029d1c86926f09109f6317d143f8b056fc38e8dd11b0c3e3205fc6c6ff8429fb55b4b9c2bce065462719ed9d34366eced387aaa0035d93eb76b306a8547ef + languageName: node + linkType: hard + "iconv-lite@npm:0.4.24": version: 0.4.24 resolution: "iconv-lite@npm:0.4.24" @@ -11734,6 +13200,13 @@ __metadata: languageName: node linkType: hard +"idb-wrapper@npm:^1.5.0": + version: 1.7.2 + resolution: "idb-wrapper@npm:1.7.2" + checksum: a5fa3a771166205e2d5d2b93c66bd31571dada3526b59bc0f8583efb091b6b327125f1a964a25a281b85ef1c44af10a3c511652632ad3adf8229a161132d66ae + languageName: node + linkType: hard + "ieee754@npm:^1.1.13, ieee754@npm:^1.2.1": version: 1.2.1 resolution: "ieee754@npm:1.2.1" @@ -11804,6 +13277,25 @@ __metadata: languageName: node linkType: hard +"import-local@npm:^3.0.2": + version: 3.1.0 + resolution: "import-local@npm:3.1.0" + dependencies: + pkg-dir: ^4.2.0 + resolve-cwd: ^3.0.0 + bin: + import-local-fixture: fixtures/cli.js + checksum: bfcdb63b5e3c0e245e347f3107564035b128a414c4da1172a20dc67db2504e05ede4ac2eee1252359f78b0bfd7b19ef180aec427c2fce6493ae782d73a04cddd + languageName: node + linkType: hard + +"import-meta-resolve@npm:^4.0.0": + version: 4.0.0 + resolution: "import-meta-resolve@npm:4.0.0" + checksum: 51c50115fd38e9ba21736f8d7543a58446b92d2cb5f38c9b5ec72426afeb2fb790f82051560a0f16323f44dd73d8d37c07eab5f8dc4635bcdb401daa36727b1a + languageName: node + linkType: hard + "imurmurhash@npm:^0.1.4": version: 0.1.4 resolution: "imurmurhash@npm:0.1.4" @@ -11818,6 +13310,13 @@ __metadata: languageName: node linkType: hard +"indexof@npm:~0.0.1": + version: 0.0.1 + resolution: "indexof@npm:0.0.1" + checksum: 0fb04e8b147b8585d981a6df1564f25bb3678d6fa74e33e5cecc1464b10f78e15e8ef6bb688f135fe5c2844a128fac8a7831cbe5adc81fdcf12681b093dfcc25 + languageName: node + linkType: hard + "infima@npm:0.2.0-alpha.43": version: 0.2.0-alpha.43 resolution: "infima@npm:0.2.0-alpha.43" @@ -11842,7 
+13341,7 @@ __metadata: languageName: node linkType: hard -"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.0, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.3": +"inherits@npm:2, inherits@npm:2.0.4, inherits@npm:^2.0.0, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4, inherits@npm:~2.0.1, inherits@npm:~2.0.3": version: 2.0.4 resolution: "inherits@npm:2.0.4" checksum: 4a48a733847879d6cf6691860a6b1e3f0f4754176e4d71494c41f3475553768b10f84b5ce1d40fbd0e34e6bfbb864ee35858ad4dd2cf31e02fc4a154b724d7f1 @@ -11863,6 +13362,13 @@ __metadata: languageName: node linkType: hard +"ini@npm:4.1.1": + version: 4.1.1 + resolution: "ini@npm:4.1.1" + checksum: 0e5909554074fbc31824fa5415b0f604de4a665514c96a897a77bf77353a7ad4743927321270e9d0610a9d510ccd1f3cd77422f7cc80d8f4542dbce75476fb6d + languageName: node + linkType: hard + "ini@npm:^1.3.4, ini@npm:^1.3.5, ini@npm:~1.3.0": version: 1.3.8 resolution: "ini@npm:1.3.8" @@ -11914,6 +13420,13 @@ __metadata: languageName: node linkType: hard +"interpret@npm:^2.2.0": + version: 2.2.0 + resolution: "interpret@npm:2.2.0" + checksum: f51efef7cb8d02da16408ffa3504cd6053014c5aeb7bb8c223727e053e4235bf565e45d67028b0c8740d917c603807aa3c27d7bd2f21bf20b6417e2bb3e5fd6e + languageName: node + linkType: hard + "invariant@npm:^2.2.4": version: 2.2.4 resolution: "invariant@npm:2.2.4" @@ -11932,7 +13445,7 @@ __metadata: languageName: node linkType: hard -"ip@npm:^1.1.5": +"ip@npm:^1.1.5, ip@npm:^1.1.8": version: 1.1.8 resolution: "ip@npm:1.1.8" checksum: a2ade53eb339fb0cbe9e69a44caab10d6e3784662285eb5d2677117ee4facc33a64679051c35e0dfdb1a3983a51ce2f5d2cb36446d52e10d01881789b76e28fb @@ -11994,6 +13507,16 @@ __metadata: languageName: node linkType: hard +"is-arguments@npm:^1.0.4": + version: 1.1.1 + resolution: "is-arguments@npm:1.1.1" + dependencies: + call-bind: ^1.0.2 + has-tostringtag: ^1.0.0 + checksum: 
7f02700ec2171b691ef3e4d0e3e6c0ba408e8434368504bb593d0d7c891c0dbfda6d19d30808b904a6cb1929bca648c061ba438c39f296c2a8ca083229c49f27 + languageName: node + linkType: hard + "is-arrayish@npm:^0.2.1": version: 0.2.1 resolution: "is-arrayish@npm:0.2.1" @@ -12017,7 +13540,7 @@ __metadata: languageName: node linkType: hard -"is-builtin-module@npm:^3.1.0": +"is-builtin-module@npm:^3.1.0, is-builtin-module@npm:^3.2.1": version: 3.2.1 resolution: "is-builtin-module@npm:3.2.1" dependencies: @@ -12026,6 +13549,13 @@ __metadata: languageName: node linkType: hard +"is-callable@npm:^1.1.3": + version: 1.2.7 + resolution: "is-callable@npm:1.2.7" + checksum: 61fd57d03b0d984e2ed3720fb1c7a897827ea174bd44402878e059542ea8c4aeedee0ea0985998aa5cc2736b2fa6e271c08587addb5b3959ac52cf665173d1ac + languageName: node + linkType: hard + "is-ci@npm:^2.0.0": version: 2.0.0 resolution: "is-ci@npm:2.0.0" @@ -12184,6 +13714,16 @@ __metadata: languageName: node linkType: hard +"is-nan@npm:^1.3.2": + version: 1.3.2 + resolution: "is-nan@npm:1.3.2" + dependencies: + call-bind: ^1.0.0 + define-properties: ^1.1.3 + checksum: 5dfadcef6ad12d3029d43643d9800adbba21cf3ce2ec849f734b0e14ee8da4070d82b15fdb35138716d02587c6578225b9a22779cab34888a139cc43e4e3610a + languageName: node + linkType: hard + "is-npm@npm:^5.0.0": version: 5.0.0 resolution: "is-npm@npm:5.0.0" @@ -12219,6 +13759,13 @@ __metadata: languageName: node linkType: hard +"is-object@npm:~0.1.2": + version: 0.1.2 + resolution: "is-object@npm:0.1.2" + checksum: 7e500b15f4748278ea0a8d43b1283e75e866c055e4a790389087ce652eab8a9343fd74710738f0fdf13a323c31330d65bdcc106f38e9bb7bc0b9c60ae3fd2a2d + languageName: node + linkType: hard + "is-path-cwd@npm:^2.2.0": version: 2.2.0 resolution: "is-path-cwd@npm:2.2.0" @@ -12314,6 +13861,15 @@ __metadata: languageName: node linkType: hard +"is-typed-array@npm:^1.1.3": + version: 1.1.12 + resolution: "is-typed-array@npm:1.1.12" + dependencies: + which-typed-array: ^1.1.11 + checksum: 
4c89c4a3be07186caddadf92197b17fda663a9d259ea0d44a85f171558270d36059d1c386d34a12cba22dfade5aba497ce22778e866adc9406098c8fc4771796 + languageName: node + linkType: hard + "is-typedarray@npm:^1.0.0": version: 1.0.0 resolution: "is-typedarray@npm:1.0.0" @@ -12365,6 +13921,13 @@ __metadata: languageName: node linkType: hard +"is@npm:~0.2.6": + version: 0.2.7 + resolution: "is@npm:0.2.7" + checksum: 45cea1e6deb41150b5753e18041a833657313e9c791c73f96fb9014b613346f5af2e6650858ef50ea6262c79555b65e09b13d30a268139863885025dd65f1059 + languageName: node + linkType: hard + "isarray@npm:0.0.1": version: 0.0.1 resolution: "isarray@npm:0.0.1" @@ -12386,6 +13949,13 @@ __metadata: languageName: node linkType: hard +"isbuffer@npm:~0.0.0": + version: 0.0.0 + resolution: "isbuffer@npm:0.0.0" + checksum: 9796296d3c493974c1f71ccf3170cc8007217a19ce8b3b9dedffd32e8ccc3ac42473b572bbf1b24b86143e826ea157aead11fd1285389518abab76c7da5f50ed + languageName: node + linkType: hard + "isexe@npm:^2.0.0": version: 2.0.0 resolution: "isexe@npm:2.0.0" @@ -12590,6 +14160,29 @@ __metadata: languageName: node linkType: hard +"json-joy@npm:^9.2.0": + version: 9.9.1 + resolution: "json-joy@npm:9.9.1" + dependencies: + arg: ^5.0.2 + hyperdyperid: ^1.2.0 + peerDependencies: + quill-delta: ^5 + rxjs: 7 + tslib: 2 + bin: + jj: bin/jj.js + json-pack: bin/json-pack.js + json-pack-test: bin/json-pack-test.js + json-patch: bin/json-patch.js + json-patch-test: bin/json-patch-test.js + json-pointer: bin/json-pointer.js + json-pointer-test: bin/json-pointer-test.js + json-unpack: bin/json-unpack.js + checksum: d165398682f00019796225faf365cd8d060f3e086af39bb5081c30907b7e52eaf13697d1c0f6ee2b010fe255ae1fd776e05ad7d6ee5fb549e98fe982f560884b + languageName: node + linkType: hard + "json-parse-even-better-errors@npm:^2.3.0, json-parse-even-better-errors@npm:^2.3.1": version: 2.3.1 resolution: "json-parse-even-better-errors@npm:2.3.1" @@ -12671,6 +14264,13 @@ __metadata: languageName: node linkType: hard 
+"just-extend@npm:^4.0.2": + version: 4.2.1 + resolution: "just-extend@npm:4.2.1" + checksum: ff9fdede240fad313efeeeb68a660b942e5586d99c0058064c78884894a2690dc09bba44c994ad4e077e45d913fef01a9240c14a72c657b53687ac58de53b39c + languageName: node + linkType: hard + "katex@npm:^0.16.0": version: 0.16.9 resolution: "katex@npm:0.16.9" @@ -12712,7 +14312,7 @@ __metadata: languageName: node linkType: hard -"keyv@npm:^4.5.3": +"keyv@npm:^4.5.3, keyv@npm:^4.5.4": version: 4.5.4 resolution: "keyv@npm:4.5.4" dependencies: @@ -12756,13 +14356,6 @@ __metadata: languageName: node linkType: hard -"kleur@npm:^4.0.3": - version: 4.1.5 - resolution: "kleur@npm:4.1.5" - checksum: 1dc476e32741acf0b1b5b0627ffd0d722e342c1b0da14de3e8ae97821327ca08f9fb944542fb3c126d90ac5f27f9d804edbe7c585bf7d12ef495d115e0f22c12 - languageName: node - linkType: hard - "koa-compose@npm:^4.1.0": version: 4.1.0 resolution: "koa-compose@npm:4.1.0" @@ -12869,6 +14462,94 @@ __metadata: languageName: node linkType: hard +"level-blobs@npm:^0.1.7": + version: 0.1.7 + resolution: "level-blobs@npm:0.1.7" + dependencies: + level-peek: 1.0.6 + once: ^1.3.0 + readable-stream: ^1.0.26-4 + checksum: e3cf78ef0bc64ff350edb4e247b2689cd4f5facf1119694ca8c96c28a05a38dc9d88e0bd065b18af65330bc22f5d588719a5c3e63adaa5feba5ea7913f87bebe + languageName: node + linkType: hard + +"level-filesystem@npm:^1.0.1": + version: 1.2.0 + resolution: "level-filesystem@npm:1.2.0" + dependencies: + concat-stream: ^1.4.4 + errno: ^0.1.1 + fwd-stream: ^1.0.4 + level-blobs: ^0.1.7 + level-peek: ^1.0.6 + level-sublevel: ^5.2.0 + octal: ^1.0.0 + once: ^1.3.0 + xtend: ^2.2.0 + checksum: a29e6a9d8c1879d43610113d1bcb59368685ec0ae413fcf0f8dcbb0a0c26b88fcf16f7481acb2b4650e5951ba0635e73a2c8fbe25cd599c50f80949a5547a367 + languageName: node + linkType: hard + +"level-fix-range@npm:2.0": + version: 2.0.0 + resolution: "level-fix-range@npm:2.0.0" + dependencies: + clone: ~0.1.9 + checksum: 
250cefa69e1035d1412b4ba3e5cab83cceb894aa833fb0a93417d8d6230c60f6f8154feffbd0f116461ddd441b909e7df1323355d3e1769b3bb20a55729145b5 + languageName: node + linkType: hard + +"level-fix-range@npm:~1.0.2": + version: 1.0.2 + resolution: "level-fix-range@npm:1.0.2" + checksum: 6c9a3894ea08947fae79c41b75e8b9d57979523b656bec43c589f2dc4455276a150df445d9a7ca880a7c58c2ef19f5cea7f661d777993b870f4943af6b31d5bb + languageName: node + linkType: hard + +"level-hooks@npm:>=4.4.0 <5": + version: 4.5.0 + resolution: "level-hooks@npm:4.5.0" + dependencies: + string-range: ~1.2 + checksum: f198ad2e0901a4719e324e67f546097589af79665ebaaabee7122fda18a41ada3158bb1816b8b82430f30c68610125e4e20b5c09ec3ba7ae262d97dba34f48ab + languageName: node + linkType: hard + +"level-js@npm:^2.1.3": + version: 2.2.4 + resolution: "level-js@npm:2.2.4" + dependencies: + abstract-leveldown: ~0.12.0 + idb-wrapper: ^1.5.0 + isbuffer: ~0.0.0 + ltgt: ^2.1.2 + typedarray-to-buffer: ~1.0.0 + xtend: ~2.1.2 + checksum: 4fed784fcfad4bc6ec97d9c3897e95eaa30326fcdab9f4c7437624d10fa875fa84aafcc2acac0d53181af506cbc012c03f413b4da12ff83758d3bcbb699f8c8e + languageName: node + linkType: hard + +"level-peek@npm:1.0.6, level-peek@npm:^1.0.6": + version: 1.0.6 + resolution: "level-peek@npm:1.0.6" + dependencies: + level-fix-range: ~1.0.2 + checksum: e07d5f8b80675727204d9a226a249139da9e354e633b9d57b7a5186a7b85be445e550ca628f5133bf7a220a9311a193ded5a3f83588dc4eaa53ffb86b426154a + languageName: node + linkType: hard + +"level-sublevel@npm:^5.2.0": + version: 5.2.3 + resolution: "level-sublevel@npm:5.2.3" + dependencies: + level-fix-range: 2.0 + level-hooks: ">=4.4.0 <5" + string-range: ~1.2.1 + xtend: ~2.0.4 + checksum: f0fdffc2f9ca289aa183a1bf7f300a8f92e4f01be60eab37ab36e1f6ec33ed449519d8f69504a616e82f3ddca13a15fa4e19af1dcc1beba9044a4c60b6cd94bf + languageName: node + linkType: hard + "level-supports@npm:^4.0.0": version: 4.0.1 resolution: "level-supports@npm:4.0.1" @@ -12896,6 +14577,21 @@ __metadata: languageName: node linkType: 
hard +"levelup@npm:^0.18.2": + version: 0.18.6 + resolution: "levelup@npm:0.18.6" + dependencies: + bl: ~0.8.1 + deferred-leveldown: ~0.2.0 + errno: ~0.1.1 + prr: ~0.0.0 + readable-stream: ~1.0.26 + semver: ~2.3.1 + xtend: ~3.0.0 + checksum: 80e140dd83dc94050e283fc02874ae85116cb560d81e14fee0ac111f86006887835ec905dca7a081414c07eca202245a580f1e02f696367b777ecc23a9e05b86 + languageName: node + linkType: hard + "leven@npm:^3.1.0": version: 3.1.0 resolution: "leven@npm:3.1.0" @@ -13030,6 +14726,13 @@ __metadata: languageName: node linkType: hard +"lodash.get@npm:^4.4.2": + version: 4.4.2 + resolution: "lodash.get@npm:4.4.2" + checksum: e403047ddb03181c9d0e92df9556570e2b67e0f0a930fcbbbd779370972368f5568e914f913e93f3b08f6d492abc71e14d4e9b7a18916c31fa04bd2306efe545 + languageName: node + linkType: hard + "lodash.isequal@npm:^4.5.0": version: 4.5.0 resolution: "lodash.isequal@npm:4.5.0" @@ -13169,6 +14872,13 @@ __metadata: languageName: node linkType: hard +"lru-cache@npm:^7.14.1": + version: 7.18.3 + resolution: "lru-cache@npm:7.18.3" + checksum: e550d772384709deea3f141af34b6d4fa392e2e418c1498c078de0ee63670f1f46f5eee746e8ef7e69e1c895af0d4224e62ee33e66a543a14763b0f2e74c1356 + languageName: node + linkType: hard + "lru-cache@npm:^8.0.4": version: 8.0.5 resolution: "lru-cache@npm:8.0.5" @@ -13183,6 +14893,13 @@ __metadata: languageName: node linkType: hard +"ltgt@npm:^2.1.2": + version: 2.2.1 + resolution: "ltgt@npm:2.2.1" + checksum: 7e3874296f7538bc8087b428ac4208008d7b76916354b34a08818ca7c83958c1df10ec427eeeaad895f6b81e41e24745b18d30f89abcc21d228b94f6961d50a2 + languageName: node + linkType: hard + "lunr-languages@npm:^1.4.0": version: 1.14.0 resolution: "lunr-languages@npm:1.14.0" @@ -13241,13 +14958,6 @@ __metadata: languageName: node linkType: hard -"map-stream@npm:~0.1.0": - version: 0.1.0 - resolution: "map-stream@npm:0.1.0" - checksum: 38abbe4eb883888031e6b2fc0630bc583c99396be16b8ace5794b937b682a8a081f03e8b15bfd4914d1bc88318f0e9ac73ba3512ae65955cd449f63256ddb31d - 
languageName: node - linkType: hard - "mark.js@npm:^8.11.1": version: 8.11.1 resolution: "mark.js@npm:8.11.1" @@ -13648,6 +15358,18 @@ __metadata: languageName: node linkType: hard +"memfs@npm:^4.6.0": + version: 4.6.0 + resolution: "memfs@npm:4.6.0" + dependencies: + json-joy: ^9.2.0 + thingies: ^1.11.1 + peerDependencies: + tslib: 2 + checksum: b32a35bee9f96dc011605f3bb39e74e6d2a5de51c952a77bb38a0dfabd3381c40ae382d27f385aa290edee8081597fb1a3b41a07bb3f775fd55312dc30ac1d9d + languageName: node + linkType: hard + "memory-level@npm:^1.0.0": version: 1.0.0 resolution: "memory-level@npm:1.0.0" @@ -14207,7 +15929,7 @@ __metadata: languageName: node linkType: hard -"micromatch@npm:^4.0.2, micromatch@npm:^4.0.4, micromatch@npm:^4.0.5": +"micromatch@npm:^4.0.0, micromatch@npm:^4.0.2, micromatch@npm:^4.0.4, micromatch@npm:^4.0.5": version: 4.0.5 resolution: "micromatch@npm:4.0.5" dependencies: @@ -14479,6 +16201,15 @@ __metadata: languageName: node linkType: hard +"mocha-each@npm:^2.0.1": + version: 2.0.1 + resolution: "mocha-each@npm:2.0.1" + dependencies: + sprintf-js: ^1.0.3 + checksum: 0de01ce517c2f7e7c3e19ef3f444809913f2f1602cb2571e6a3f8cb7ef3040f4f01b0f9f11a317e4ec1aeb9d39ceae4947c96668560cf638fe4d02ea549c2d4c + languageName: node + linkType: hard + "mocha@npm:^10.0.0, mocha@npm:^10.2.0": version: 10.2.0 resolution: "mocha@npm:10.2.0" @@ -14546,13 +16277,6 @@ __metadata: languageName: node linkType: hard -"mri@npm:^1.1.0": - version: 1.2.0 - resolution: "mri@npm:1.2.0" - checksum: 83f515abbcff60150873e424894a2f65d68037e5a7fcde8a9e2b285ee9c13ac581b63cfc1e6826c4732de3aeb84902f7c1e16b7aff46cd3f897a0f757a894e85 - languageName: node - linkType: hard - "mrmime@npm:^1.0.0": version: 1.0.1 resolution: "mrmime@npm:1.0.1" @@ -14646,6 +16370,26 @@ __metadata: languageName: node linkType: hard +"netmask@npm:^2.0.2": + version: 2.0.2 + resolution: "netmask@npm:2.0.2" + checksum: 
c65cb8d3f7ea5669edddb3217e4c96910a60d0d9a4b52d9847ff6b28b2d0277cd8464eee0ef85133cdee32605c57940cacdd04a9a019079b091b6bba4cb0ec22 + languageName: node + linkType: hard + +"nise@npm:^5.1.5": + version: 5.1.5 + resolution: "nise@npm:5.1.5" + dependencies: + "@sinonjs/commons": ^2.0.0 + "@sinonjs/fake-timers": ^10.0.2 + "@sinonjs/text-encoding": ^0.7.1 + just-extend: ^4.0.2 + path-to-regexp: ^1.7.0 + checksum: c763dc62c5796cafa5c9268e14a5b34db6e6fa2f1dbc57a891fe5d7ea632a87868e22b5bb34965006f984630793ea11368351e94971163228d9e20b2e88edce8 + languageName: node + linkType: hard + "no-case@npm:^3.0.4": version: 3.0.4 resolution: "no-case@npm:3.0.4" @@ -14665,13 +16409,6 @@ __metadata: languageName: node linkType: hard -"node-domexception@npm:^1.0.0": - version: 1.0.0 - resolution: "node-domexception@npm:1.0.0" - checksum: ee1d37dd2a4eb26a8a92cd6b64dfc29caec72bff5e1ed9aba80c294f57a31ba4895a60fd48347cf17dd6e766da0ae87d75657dfd1f384ebfa60462c2283f5c7f - languageName: node - linkType: hard - "node-emoji@npm:^1.10.0": version: 1.11.0 resolution: "node-emoji@npm:1.11.0" @@ -14707,14 +16444,17 @@ __metadata: languageName: node linkType: hard -"node-fetch@npm:3.2.10": - version: 3.2.10 - resolution: "node-fetch@npm:3.2.10" +"node-fetch@npm:^2.6.12": + version: 2.7.0 + resolution: "node-fetch@npm:2.7.0" dependencies: - data-uri-to-buffer: ^4.0.0 - fetch-blob: ^3.1.4 - formdata-polyfill: ^4.0.10 - checksum: e65322431f4897ded04197aa5923eaec63a8d53e00432de4e70a4f7006625c8dc32629c5c35f4fe8ee719a4825544d07bf53f6e146a7265914262f493e8deac1 + whatwg-url: ^5.0.0 + peerDependencies: + encoding: ^0.1.0 + peerDependenciesMeta: + encoding: + optional: true + checksum: d76d2f5edb451a3f05b15115ec89fc6be39de37c6089f1b6368df03b91e1633fd379a7e01b7ab05089a25034b2023d959b47e59759cb38d88341b2459e89d6e5 languageName: node linkType: hard @@ -14857,6 +16597,16 @@ __metadata: languageName: node linkType: hard +"object-is@npm:^1.1.5": + version: 1.1.5 + resolution: "object-is@npm:1.1.5" + dependencies: + 
call-bind: ^1.0.2 + define-properties: ^1.1.3 + checksum: 989b18c4cba258a6b74dc1d74a41805c1a1425bce29f6cabb50dcb1a6a651ea9104a1b07046739a49a5bb1bc49727bcb00efd5c55f932f6ea04ec8927a7901fe + languageName: node + linkType: hard + "object-keys@npm:^1.1.1": version: 1.1.1 resolution: "object-keys@npm:1.1.1" @@ -14864,7 +16614,25 @@ __metadata: languageName: node linkType: hard -"object.assign@npm:^4.1.0": +"object-keys@npm:~0.2.0": + version: 0.2.0 + resolution: "object-keys@npm:0.2.0" + dependencies: + foreach: ~2.0.1 + indexof: ~0.0.1 + is: ~0.2.6 + checksum: 4b96bab88fe9df22a03aec3c59a084bdffc789ad1318a39081e6b8389af6b9ab8571dd3776eed3ec5831137d057fb7ba76911552c6a6efd59b5d126ac3b6e432 + languageName: node + linkType: hard + +"object-keys@npm:~0.4.0": + version: 0.4.0 + resolution: "object-keys@npm:0.4.0" + checksum: 1be3ebe9b48c0d5eda8e4a30657d887a748cb42435e0e2eaf49faf557bdd602cd2b7558b8ce90a4eb2b8592d16b875a1900bce859cbb0f35b21c67e11a45313c + languageName: node + linkType: hard + +"object.assign@npm:^4.1.0, object.assign@npm:^4.1.4": version: 4.1.5 resolution: "object.assign@npm:4.1.5" dependencies: @@ -14890,6 +16658,13 @@ __metadata: languageName: node linkType: hard +"octal@npm:^1.0.0": + version: 1.0.0 + resolution: "octal@npm:1.0.0" + checksum: d648917f4f0a1042d7a4e230262aed00274c9791fe4795e9a2ce3b64ab7f2ca93e62cd55ca5ad4e4bd3fc375ca84d6919d7bf417be461790c1042503ac2c2310 + languageName: node + linkType: hard + "on-finished@npm:2.4.1, on-finished@npm:^2.3.0": version: 2.4.1 resolution: "on-finished@npm:2.4.1" @@ -15146,6 +16921,33 @@ __metadata: languageName: node linkType: hard +"pac-proxy-agent@npm:^7.0.0": + version: 7.0.1 + resolution: "pac-proxy-agent@npm:7.0.1" + dependencies: + "@tootallnate/quickjs-emscripten": ^0.23.0 + agent-base: ^7.0.2 + debug: ^4.3.4 + get-uri: ^6.0.1 + http-proxy-agent: ^7.0.0 + https-proxy-agent: ^7.0.2 + pac-resolver: ^7.0.0 + socks-proxy-agent: ^8.0.2 + checksum: 
3d4aa48ec1c19db10158ecc1c4c9a9f77792294412d225ceb3dfa45d5a06950dca9755e2db0d9b69f12769119bea0adf2b24390d9c73c8d81df75e28245ae451 + languageName: node + linkType: hard + +"pac-resolver@npm:^7.0.0": + version: 7.0.0 + resolution: "pac-resolver@npm:7.0.0" + dependencies: + degenerator: ^5.0.0 + ip: ^1.1.8 + netmask: ^2.0.2 + checksum: fa3a898c09848e93e35f5e23443fea36ddb393a851c76a23664a5bf3fcbe58ff77a0bcdae1e4f01b9ea87ea493c52e14d97a0fe39f92474d14cd45559c6e3cde + languageName: node + linkType: hard + "package-json@npm:^6.3.0": version: 6.5.0 resolution: "package-json@npm:6.5.0" @@ -15170,6 +16972,13 @@ __metadata: languageName: node linkType: hard +"pako@npm:^2.1.0": + version: 2.1.0 + resolution: "pako@npm:2.1.0" + checksum: 71666548644c9a4d056bcaba849ca6fd7242c6cf1af0646d3346f3079a1c7f4a66ffec6f7369ee0dc88f61926c10d6ab05da3e1fca44b83551839e89edd75a3e + languageName: node + linkType: hard + "param-case@npm:^3.0.4": version: 3.0.4 resolution: "param-case@npm:3.0.4" @@ -15189,6 +16998,15 @@ __metadata: languageName: node linkType: hard +"parent-module@npm:^2.0.0": + version: 2.0.0 + resolution: "parent-module@npm:2.0.0" + dependencies: + callsites: ^3.1.0 + checksum: f131f13d687a938556a01033561fb1b274b39921eb4425c7a691f0d91dcfbe9b19759c2b8d425a3ee7c8a46874e57fa418a690643880c3c7c56827aba12f78dd + languageName: node + linkType: hard + "parse-entities@npm:^2.0.0": version: 2.0.0 resolution: "parse-entities@npm:2.0.0" @@ -15281,6 +17099,13 @@ __metadata: languageName: node linkType: hard +"path-browserify@npm:^1.0.1": + version: 1.0.1 + resolution: "path-browserify@npm:1.0.1" + checksum: c6d7fa376423fe35b95b2d67990060c3ee304fc815ff0a2dc1c6c3cfaff2bd0d572ee67e18f19d0ea3bbe32e8add2a05021132ac40509416459fffee35200699 + languageName: node + linkType: hard + "path-exists@npm:^3.0.0": version: 3.0.0 resolution: "path-exists@npm:3.0.0" @@ -15384,15 +17209,6 @@ __metadata: languageName: node linkType: hard -"pause-stream@npm:0.0.11": - version: 0.0.11 - resolution: 
"pause-stream@npm:0.0.11" - dependencies: - through: ~2.3 - checksum: 3c4a14052a638b92e0c96eb00c0d7977df7f79ea28395250c525d197f1fc02d34ce1165d5362e2e6ebbb251524b94a76f3f0d4abc39ab8b016d97449fe15583c - languageName: node - linkType: hard - "pbkdf2@npm:^3.0.17": version: 3.1.2 resolution: "pbkdf2@npm:3.1.2" @@ -15438,7 +17254,7 @@ __metadata: languageName: node linkType: hard -"pkg-dir@npm:^4.1.0": +"pkg-dir@npm:^4.1.0, pkg-dir@npm:^4.2.0": version: 4.2.0 resolution: "pkg-dir@npm:4.2.0" dependencies: @@ -16036,6 +17852,13 @@ __metadata: languageName: node linkType: hard +"process@npm:^0.11.10": + version: 0.11.10 + resolution: "process@npm:0.11.10" + checksum: bfcce49814f7d172a6e6a14d5fa3ac92cc3d0c3b9feb1279774708a719e19acd673995226351a082a9ae99978254e320ccda4240ddc474ba31a76c79491ca7c3 + languageName: node + linkType: hard + "progress@npm:2.0.3": version: 2.0.3 resolution: "progress@npm:2.0.3" @@ -16107,6 +17930,22 @@ __metadata: languageName: node linkType: hard +"proxy-agent@npm:6.3.0": + version: 6.3.0 + resolution: "proxy-agent@npm:6.3.0" + dependencies: + agent-base: ^7.0.2 + debug: ^4.3.4 + http-proxy-agent: ^7.0.0 + https-proxy-agent: ^7.0.0 + lru-cache: ^7.14.1 + pac-proxy-agent: ^7.0.0 + proxy-from-env: ^1.1.0 + socks-proxy-agent: ^8.0.1 + checksum: e3fb0633d665e352ed4efe23ae5616b8301423dfa4ff1c5975d093da8a636181a97391f7a91c6a7ffae17c1a305df855e95507f73bcdafda8876198c64b88f5b + languageName: node + linkType: hard + "proxy-from-env@npm:1.1.0, proxy-from-env@npm:^1.1.0": version: 1.1.0 resolution: "proxy-from-env@npm:1.1.0" @@ -16114,14 +17953,17 @@ __metadata: languageName: node linkType: hard -"ps-tree@npm:^1.2.0": - version: 1.2.0 - resolution: "ps-tree@npm:1.2.0" - dependencies: - event-stream: =3.3.4 - bin: - ps-tree: ./bin/ps-tree.js - checksum: e635dd00f53d30d31696cf5f95b3a8dbdf9b1aeb36d4391578ce8e8cd22949b7c5536c73b0dc18c78615ea3ddd4be96101166be59ca2e3e3cb1e2f79ba3c7f98 +"prr@npm:~0.0.0": + version: 0.0.0 + resolution: "prr@npm:0.0.0" + checksum: 
6552d9d92d9d55ec1afb8952ad80f81bbb1b4379f24ff7c506ad083ea701caf1bf6d4b092a2baeb98ec3f312c5a49d8bdf1d9b20a6db2998d05c2d52aa6a82e7 + languageName: node + linkType: hard + +"prr@npm:~1.0.1": + version: 1.0.1 + resolution: "prr@npm:1.0.1" + checksum: 3bca2db0479fd38f8c4c9439139b0c42dcaadcc2fbb7bb8e0e6afaa1383457f1d19aea9e5f961d5b080f1cfc05bfa1fe9e45c97a1d3fd6d421950a73d3108381 languageName: node linkType: hard @@ -16135,7 +17977,7 @@ __metadata: languageName: node linkType: hard -"punycode@npm:^1.3.2": +"punycode@npm:^1.3.2, punycode@npm:^1.4.1": version: 1.4.1 resolution: "punycode@npm:1.4.1" checksum: fa6e698cb53db45e4628559e557ddaf554103d2a96a1d62892c8f4032cd3bc8871796cae9eabc1bc700e2b6677611521ce5bb1d9a27700086039965d0cf34518 @@ -16191,6 +18033,25 @@ __metadata: languageName: node linkType: hard +"puppeteer-core@npm:^20.0.0": + version: 20.9.0 + resolution: "puppeteer-core@npm:20.9.0" + dependencies: + "@puppeteer/browsers": 1.4.6 + chromium-bidi: 0.4.16 + cross-fetch: 4.0.0 + debug: 4.3.4 + devtools-protocol: 0.0.1147663 + ws: 8.13.0 + peerDependencies: + typescript: ">= 4.7.4" + peerDependenciesMeta: + typescript: + optional: true + checksum: d298598445b0f2032c02d0ed7d1d18a8d2d2fcaf6fc31fc96e93e2669a7fc6fbee0338bd9b8c8f8822887f18a8fb680b77bb56e96fe1928baadb52292bbd93b4 + languageName: node + linkType: hard + "qs@npm:6.11.0": version: 6.11.0 resolution: "qs@npm:6.11.0" @@ -16200,7 +18061,7 @@ __metadata: languageName: node linkType: hard -"qs@npm:^6.5.2": +"qs@npm:^6.11.2, qs@npm:^6.5.2": version: 6.11.2 resolution: "qs@npm:6.11.2" dependencies: @@ -16216,6 +18077,13 @@ __metadata: languageName: node linkType: hard +"queue-tick@npm:^1.0.1": + version: 1.0.1 + resolution: "queue-tick@npm:1.0.1" + checksum: 57c3292814b297f87f792fbeb99ce982813e4e54d7a8bdff65cf53d5c084113913289d4a48ec8bbc964927a74b847554f9f4579df43c969a6c8e0f026457ad01 + languageName: node + linkType: hard + "queue@npm:6.0.2": version: 6.0.2 resolution: "queue@npm:6.0.2" @@ -16488,7 +18356,19 @@ 
__metadata: languageName: node linkType: hard -"readable-stream@npm:^2.0.1": +"readable-stream@npm:^1.0.26-4": + version: 1.1.14 + resolution: "readable-stream@npm:1.1.14" + dependencies: + core-util-is: ~1.0.0 + inherits: ~2.0.1 + isarray: 0.0.1 + string_decoder: ~0.10.x + checksum: 17dfeae3e909945a4a1abc5613ea92d03269ef54c49288599507fc98ff4615988a1c39a999dcf9aacba70233d9b7040bc11a5f2bfc947e262dedcc0a8b32b5a0 + languageName: node + linkType: hard + +"readable-stream@npm:^2.0.1, readable-stream@npm:^2.2.2": version: 2.3.8 resolution: "readable-stream@npm:2.3.8" dependencies: @@ -16514,6 +18394,31 @@ __metadata: languageName: node linkType: hard +"readable-stream@npm:^4.4.2": + version: 4.4.2 + resolution: "readable-stream@npm:4.4.2" + dependencies: + abort-controller: ^3.0.0 + buffer: ^6.0.3 + events: ^3.3.0 + process: ^0.11.10 + string_decoder: ^1.3.0 + checksum: 6f4063763dbdb52658d22d3f49ca976420e1fbe16bbd241f744383715845350b196a2f08b8d6330f8e219153dff34b140aeefd6296da828e1041a7eab1f20d5e + languageName: node + linkType: hard + +"readable-stream@npm:~1.0.26, readable-stream@npm:~1.0.26-4": + version: 1.0.34 + resolution: "readable-stream@npm:1.0.34" + dependencies: + core-util-is: ~1.0.0 + inherits: ~2.0.1 + isarray: 0.0.1 + string_decoder: ~0.10.x + checksum: 85042c537e4f067daa1448a7e257a201070bfec3dd2706abdbd8ebc7f3418eb4d3ed4b8e5af63e2544d69f88ab09c28d5da3c0b77dc76185fddd189a59863b60 + languageName: node + linkType: hard + "readdirp@npm:~3.6.0": version: 3.6.0 resolution: "readdirp@npm:3.6.0" @@ -16539,6 +18444,15 @@ __metadata: languageName: node linkType: hard +"rechoir@npm:^0.7.0": + version: 0.7.1 + resolution: "rechoir@npm:0.7.1" + dependencies: + resolve: ^1.9.0 + checksum: 2a04aab4e28c05fcd6ee6768446bc8b859d8f108e71fc7f5bcbc5ef25e53330ce2c11d10f82a24591a2df4c49c4f61feabe1fd11f844c66feedd4cd7bb61146a + languageName: node + linkType: hard + "recursive-readdir@npm:^2.2.2": version: 2.2.3 resolution: "recursive-readdir@npm:2.2.3" @@ -16700,15 +18614,6 @@ 
__metadata: languageName: node linkType: hard -"release-tests@workspace:release-tests": - version: 0.0.0-use.local - resolution: "release-tests@workspace:release-tests" - dependencies: - uvu: 0.5.6 - zx: 7.1.1 - languageName: unknown - linkType: soft - "remark-directive@npm:^3.0.0": version: 3.0.0 resolution: "remark-directive@npm:3.0.0" @@ -16898,7 +18803,7 @@ __metadata: languageName: node linkType: hard -"repeat-string@npm:^1.5.4": +"repeat-string@npm:^1.5.4, repeat-string@npm:^1.6.1": version: 1.6.1 resolution: "repeat-string@npm:1.6.1" checksum: 1b809fc6db97decdc68f5b12c4d1a671c8e3f65ec4a40c238bc5200e44e85bcc52a54f78268ab9c29fcf5fe4f1343e805420056d1f30fa9a9ee4c2d93e3cc6c0 @@ -16940,6 +18845,15 @@ __metadata: languageName: node linkType: hard +"resolve-cwd@npm:^3.0.0": + version: 3.0.0 + resolution: "resolve-cwd@npm:3.0.0" + dependencies: + resolve-from: ^5.0.0 + checksum: 546e0816012d65778e580ad62b29e975a642989108d9a3c5beabfb2304192fa3c9f9146fbdfe213563c6ff51975ae41bac1d3c6e047dd9572c94863a057b4d81 + languageName: node + linkType: hard + "resolve-from@npm:^4.0.0": version: 4.0.0 resolution: "resolve-from@npm:4.0.0" @@ -16947,6 +18861,13 @@ __metadata: languageName: node linkType: hard +"resolve-from@npm:^5.0.0": + version: 5.0.0 + resolution: "resolve-from@npm:5.0.0" + checksum: 4ceeb9113e1b1372d0cd969f3468fa042daa1dd9527b1b6bb88acb6ab55d8b9cd65dbf18819f9f9ddf0db804990901dcdaade80a215e7b2c23daae38e64f5bdf + languageName: node + linkType: hard + "resolve-path@npm:^1.4.0": version: 1.4.0 resolution: "resolve-path@npm:1.4.0" @@ -16980,7 +18901,7 @@ __metadata: languageName: node linkType: hard -"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.3.2": +"resolve@npm:^1.1.6, resolve@npm:^1.14.2, resolve@npm:^1.19.0, resolve@npm:^1.22.1, resolve@npm:^1.3.2, resolve@npm:^1.9.0": version: 1.22.8 resolution: "resolve@npm:1.22.8" dependencies: @@ -17002,7 +18923,7 @@ __metadata: languageName: node linkType: hard 
-"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.3.2#~builtin": +"resolve@patch:resolve@^1.1.6#~builtin, resolve@patch:resolve@^1.14.2#~builtin, resolve@patch:resolve@^1.19.0#~builtin, resolve@patch:resolve@^1.22.1#~builtin, resolve@patch:resolve@^1.3.2#~builtin, resolve@patch:resolve@^1.9.0#~builtin": version: 1.22.8 resolution: "resolve@patch:resolve@npm%3A1.22.8#~builtin::version=1.22.8&hash=c3c19d" dependencies: @@ -17086,6 +19007,17 @@ __metadata: languageName: node linkType: hard +"rimraf@npm:^5.0.5": + version: 5.0.5 + resolution: "rimraf@npm:5.0.5" + dependencies: + glob: ^10.3.7 + bin: + rimraf: dist/esm/bin.mjs + checksum: d66eef829b2e23b16445f34e73d75c7b7cf4cbc8834b04720def1c8f298eb0753c3d76df77325fad79d0a2c60470525d95f89c2475283ad985fd7441c32732d1 + languageName: node + linkType: hard + "ripemd160@npm:^2.0.0, ripemd160@npm:^2.0.1": version: 2.0.2 resolution: "ripemd160@npm:2.0.2" @@ -17121,6 +19053,60 @@ __metadata: languageName: node linkType: hard +"rollup@npm:^4.4.0": + version: 4.9.4 + resolution: "rollup@npm:4.9.4" + dependencies: + "@rollup/rollup-android-arm-eabi": 4.9.4 + "@rollup/rollup-android-arm64": 4.9.4 + "@rollup/rollup-darwin-arm64": 4.9.4 + "@rollup/rollup-darwin-x64": 4.9.4 + "@rollup/rollup-linux-arm-gnueabihf": 4.9.4 + "@rollup/rollup-linux-arm64-gnu": 4.9.4 + "@rollup/rollup-linux-arm64-musl": 4.9.4 + "@rollup/rollup-linux-riscv64-gnu": 4.9.4 + "@rollup/rollup-linux-x64-gnu": 4.9.4 + "@rollup/rollup-linux-x64-musl": 4.9.4 + "@rollup/rollup-win32-arm64-msvc": 4.9.4 + "@rollup/rollup-win32-ia32-msvc": 4.9.4 + "@rollup/rollup-win32-x64-msvc": 4.9.4 + "@types/estree": 1.0.5 + fsevents: ~2.3.2 + dependenciesMeta: + "@rollup/rollup-android-arm-eabi": + optional: true + "@rollup/rollup-android-arm64": + optional: true + "@rollup/rollup-darwin-arm64": + optional: true + "@rollup/rollup-darwin-x64": + optional: true + 
"@rollup/rollup-linux-arm-gnueabihf": + optional: true + "@rollup/rollup-linux-arm64-gnu": + optional: true + "@rollup/rollup-linux-arm64-musl": + optional: true + "@rollup/rollup-linux-riscv64-gnu": + optional: true + "@rollup/rollup-linux-x64-gnu": + optional: true + "@rollup/rollup-linux-x64-musl": + optional: true + "@rollup/rollup-win32-arm64-msvc": + optional: true + "@rollup/rollup-win32-ia32-msvc": + optional: true + "@rollup/rollup-win32-x64-msvc": + optional: true + fsevents: + optional: true + bin: + rollup: dist/bin/rollup + checksum: 134b1fd8886a1dc86b2cadada979174e736a39aec12d069261fe8b799ad0c4aa3213188ea49adeee155669315016617260e43eea754436c50121aa359899da4d + languageName: node + linkType: hard + "rtl-detect@npm:^1.0.4": version: 1.1.2 resolution: "rtl-detect@npm:1.1.2" @@ -17185,15 +19171,6 @@ __metadata: languageName: node linkType: hard -"sade@npm:^1.7.3": - version: 1.8.1 - resolution: "sade@npm:1.8.1" - dependencies: - mri: ^1.1.0 - checksum: 0756e5b04c51ccdc8221ebffd1548d0ce5a783a44a0fa9017a026659b97d632913e78f7dca59f2496aa996a0be0b0c322afd87ca72ccd909406f49dbffa0f45d - languageName: node - linkType: hard - "safe-buffer@npm:5.1.2, safe-buffer@npm:~5.1.0, safe-buffer@npm:~5.1.1": version: 5.1.2 resolution: "safe-buffer@npm:5.1.2" @@ -17369,6 +19346,15 @@ __metadata: languageName: node linkType: hard +"semver@npm:~2.3.1": + version: 2.3.2 + resolution: "semver@npm:2.3.2" + bin: + semver: ./bin/semver + checksum: e0649fb18a1da909df7b5a6f586314a7f6e052385fc1e6eafa7084dd77c0787e755ab35ca491f9eec986fe1d0d6d36eae85a21eb7e2ed32ae5906796acb92c56 + languageName: node + linkType: hard + "send@npm:0.18.0": version: 0.18.0 resolution: "send@npm:0.18.0" @@ -17606,6 +19592,20 @@ __metadata: languageName: node linkType: hard +"sinon@npm:^17.0.1": + version: 17.0.1 + resolution: "sinon@npm:17.0.1" + dependencies: + "@sinonjs/commons": ^3.0.0 + "@sinonjs/fake-timers": ^11.2.2 + "@sinonjs/samsam": ^8.0.0 + diff: ^5.1.0 + nise: ^5.1.5 + supports-color: ^7.2.0 + 
checksum: a807c2997d6eabdcaa4409df9fd9816a3e839f96d7e5d76610a33f5e1b60cf37616c6288f0f580262da17ea4ee626c6d1600325bf423e30c5a7f0d9a203e26c0 + languageName: node + linkType: hard + "sirv@npm:^2.0.3": version: 2.0.3 resolution: "sirv@npm:2.0.3" @@ -17697,7 +19697,7 @@ __metadata: languageName: node linkType: hard -"socks-proxy-agent@npm:^8.0.1": +"socks-proxy-agent@npm:^8.0.1, socks-proxy-agent@npm:^8.0.2": version: 8.0.2 resolution: "socks-proxy-agent@npm:8.0.2" dependencies: @@ -17768,14 +19768,14 @@ __metadata: languageName: node linkType: hard -"source-map@npm:^0.6.0, source-map@npm:^0.6.1, source-map@npm:~0.6.0": +"source-map@npm:^0.6.0, source-map@npm:^0.6.1, source-map@npm:~0.6.0, source-map@npm:~0.6.1": version: 0.6.1 resolution: "source-map@npm:0.6.1" checksum: 59ce8640cf3f3124f64ac289012c2b8bd377c238e316fb323ea22fbfe83da07d81e000071d7242cad7a23cd91c7de98e4df8830ec3f133cb6133a5f6e9f67bc2 languageName: node linkType: hard -"source-map@npm:^0.7.0, source-map@npm:^0.7.3": +"source-map@npm:^0.7.0, source-map@npm:^0.7.3, source-map@npm:^0.7.4": version: 0.7.4 resolution: "source-map@npm:0.7.4" checksum: 01cc5a74b1f0e1d626a58d36ad6898ea820567e87f18dfc9d24a9843a351aaa2ec09b87422589906d6ff1deed29693e176194dc88bcae7c9a852dc74b311dbf5 @@ -17823,12 +19823,10 @@ __metadata: languageName: node linkType: hard -"split@npm:0.3": - version: 0.3.3 - resolution: "split@npm:0.3.3" - dependencies: - through: 2 - checksum: 2e076634c9637cfdc54ab4387b6a243b8c33b360874a25adf6f327a5647f07cb3bf1c755d515248eb3afee4e382278d01f62c62d87263c118f28065b86f74f02 +"sprintf-js@npm:^1.0.3": + version: 1.1.3 + resolution: "sprintf-js@npm:1.1.3" + checksum: a3fdac7b49643875b70864a9d9b469d87a40dfeaf5d34d9d0c5b1cda5fd7d065531fcb43c76357d62254c57184a7b151954156563a4d6a747015cfb41021cad0 languageName: node linkType: hard @@ -17906,15 +19904,6 @@ __metadata: languageName: node linkType: hard -"stream-combiner@npm:~0.0.4": - version: 0.0.4 - resolution: "stream-combiner@npm:0.0.4" - dependencies: - 
duplexer: ~0.1.1 - checksum: 844b622cfe8b9de45a6007404f613b60aaf85200ab9862299066204242f89a7c8033b1c356c998aa6cfc630f6cd9eba119ec1c6dc1f93e245982be4a847aee7d - languageName: node - linkType: hard - "stream-read-all@npm:^3.0.1": version: 3.0.1 resolution: "stream-read-all@npm:3.0.1" @@ -17922,6 +19911,16 @@ __metadata: languageName: node linkType: hard +"streamx@npm:^2.15.0": + version: 2.15.6 + resolution: "streamx@npm:2.15.6" + dependencies: + fast-fifo: ^1.1.0 + queue-tick: ^1.0.1 + checksum: 37a245f5cee4c33fcb8b018ccb935bad6eab423f05b0d14d018e63dbd2670bb109a69442e961a195b750c2c774f613c19476d11bd727d645eedb655d2dba234b + languageName: node + linkType: hard + "string-format@npm:^2.0.0": version: 2.0.0 resolution: "string-format@npm:2.0.0" @@ -17929,6 +19928,13 @@ __metadata: languageName: node linkType: hard +"string-range@npm:~1.2, string-range@npm:~1.2.1": + version: 1.2.2 + resolution: "string-range@npm:1.2.2" + checksum: 7118cc83a7e63fca5fd8bef9b61464bfc51197b5f6dc475c9e1d24a93ce02fa27f7adb4cd7adac5daf599bde442b383608078f9b051bddb108d3b45840923097 + languageName: node + linkType: hard + "string-to-stream@npm:^3.0.1": version: 3.0.1 resolution: "string-to-stream@npm:3.0.1" @@ -17960,7 +19966,7 @@ __metadata: languageName: node linkType: hard -"string_decoder@npm:^1.1.1": +"string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0": version: 1.3.0 resolution: "string_decoder@npm:1.3.0" dependencies: @@ -17969,6 +19975,13 @@ __metadata: languageName: node linkType: hard +"string_decoder@npm:~0.10.x": + version: 0.10.31 + resolution: "string_decoder@npm:0.10.31" + checksum: fe00f8e303647e5db919948ccb5ce0da7dea209ab54702894dd0c664edd98e5d4df4b80d6fabf7b9e92b237359d21136c95bf068b2f7760b772ca974ba970202 + languageName: node + linkType: hard + "string_decoder@npm:~1.1.1": version: 1.1.1 resolution: "string_decoder@npm:1.1.1" @@ -18008,7 +20021,7 @@ __metadata: languageName: node linkType: hard -"strip-ansi@npm:^7.0.1": +"strip-ansi@npm:^7.0.1, strip-ansi@npm:^7.1.0": 
version: 7.1.0 resolution: "strip-ansi@npm:7.1.0" dependencies: @@ -18125,7 +20138,7 @@ __metadata: languageName: node linkType: hard -"supports-color@npm:^7.1.0": +"supports-color@npm:^7.1.0, supports-color@npm:^7.2.0": version: 7.2.0 resolution: "supports-color@npm:7.2.0" dependencies: @@ -18230,6 +20243,17 @@ __metadata: languageName: node linkType: hard +"tar-fs@npm:3.0.4": + version: 3.0.4 + resolution: "tar-fs@npm:3.0.4" + dependencies: + mkdirp-classic: ^0.5.2 + pump: ^3.0.0 + tar-stream: ^3.1.5 + checksum: dcf4054f9e92ca0efe61c2b3f612914fb259a47900aa908a63106513a6d006c899b426ada53eb88d9dbbf089b5724c8e90b96a2c4ca6171845fa14203d734e30 + languageName: node + linkType: hard + "tar-stream@npm:^2.1.4": version: 2.2.0 resolution: "tar-stream@npm:2.2.0" @@ -18243,6 +20267,17 @@ __metadata: languageName: node linkType: hard +"tar-stream@npm:^3.1.5": + version: 3.1.6 + resolution: "tar-stream@npm:3.1.6" + dependencies: + b4a: ^1.6.4 + fast-fifo: ^1.2.0 + streamx: ^2.15.0 + checksum: f3627f918581976e954ff03cb8d370551053796b82564f8c7ca8fac84c48e4d042026d0854fc222171a34ff9c682b72fae91be9c9b0a112d4c54f9e4f443e9c5 + languageName: node + linkType: hard + "tar@npm:^6.1.11, tar@npm:^6.1.2": version: 6.2.0 resolution: "tar@npm:6.2.0" @@ -18300,7 +20335,16 @@ __metadata: languageName: node linkType: hard -"through@npm:2, through@npm:^2.3.8, through@npm:~2.3, through@npm:~2.3.1": +"thingies@npm:^1.11.1": + version: 1.15.0 + resolution: "thingies@npm:1.15.0" + peerDependencies: + tslib: ^2 + checksum: 9721bc0cbcf1565ba1abd70cfe5141a638c88dded74abb24d9b21749e16034af37dfa68edd37e7d1bb1b1c9c8bfb301c4699a8e4bc998a3a9c18052edb829bb2 + languageName: node + linkType: hard + +"through@npm:^2.3.8": version: 2.3.8 resolution: "through@npm:2.3.8" checksum: a38c3e059853c494af95d50c072b83f8b676a9ba2818dcc5b108ef252230735c54e0185437618596c790bbba8fcdaef5b290405981ffa09dce67b1f1bf190cbd @@ -18462,6 +20506,22 @@ __metadata: languageName: node linkType: hard +"ts-loader@npm:^9.5.1": + version: 
9.5.1 + resolution: "ts-loader@npm:9.5.1" + dependencies: + chalk: ^4.1.0 + enhanced-resolve: ^5.0.0 + micromatch: ^4.0.0 + semver: ^7.3.4 + source-map: ^0.7.4 + peerDependencies: + typescript: "*" + webpack: ^5.0.0 + checksum: 7cf396e656d905388ea2a9b5e82f16d3c955fda8d3df2fbf219f4bee16ff50a3c995c44ae3e584634e9443f056cec70bb3151add3917ffb4588ecd7394bac0ec + languageName: node + linkType: hard + "ts-node@npm:^10.9.1": version: 10.9.2 resolution: "ts-node@npm:10.9.2" @@ -18536,7 +20596,7 @@ __metadata: languageName: node linkType: hard -"tslib@npm:^2.0.3, tslib@npm:^2.1.0, tslib@npm:^2.4.0, tslib@npm:^2.5.0, tslib@npm:^2.6.0, tslib@npm:^2.6.2": +"tslib@npm:^2.0.1, tslib@npm:^2.0.3, tslib@npm:^2.1.0, tslib@npm:^2.4.0, tslib@npm:^2.5.0, tslib@npm:^2.6.0, tslib@npm:^2.6.2": version: 2.6.2 resolution: "tslib@npm:2.6.2" checksum: 329ea56123005922f39642318e3d1f0f8265d1e7fcb92c633e0809521da75eeaca28d2cf96d7248229deb40e5c19adf408259f4b9640afd20d13aecc1430f3ad @@ -18603,7 +20663,7 @@ __metadata: languageName: node linkType: hard -"type-detect@npm:^4.0.0, type-detect@npm:^4.0.8": +"type-detect@npm:4.0.8, type-detect@npm:^4.0.0, type-detect@npm:^4.0.8": version: 4.0.8 resolution: "type-detect@npm:4.0.8" checksum: 62b5628bff67c0eb0b66afa371bd73e230399a8d2ad30d852716efcc4656a7516904570cd8631a49a3ce57c10225adf5d0cbdcb47f6b0255fe6557c453925a15 @@ -18664,6 +20724,20 @@ __metadata: languageName: node linkType: hard +"typedarray-to-buffer@npm:~1.0.0": + version: 1.0.4 + resolution: "typedarray-to-buffer@npm:1.0.4" + checksum: ac6989c456a0b175c8362b3ebbd8a74af7b9bcc94f9dc9ffd34436569cd29aea6a1e0e5f5752d0d5bd855a55b2520e960d1d4cb9c9149f863ce09220540df17f + languageName: node + linkType: hard + +"typedarray@npm:^0.0.6": + version: 0.0.6 + resolution: "typedarray@npm:0.0.6" + checksum: 33b39f3d0e8463985eeaeeacc3cb2e28bc3dfaf2a5ed219628c0b629d5d7b810b0eb2165f9f607c34871d5daa92ba1dc69f49051cf7d578b4cbd26c340b9d1b1 + languageName: node + linkType: hard + 
"typedoc-plugin-frontmatter@npm:^0.0.2": version: 0.0.2 resolution: "typedoc-plugin-frontmatter@npm:0.0.2" @@ -19129,6 +21203,15 @@ __metadata: languageName: node linkType: hard +"unzipit@npm:^1.4.3": + version: 1.4.3 + resolution: "unzipit@npm:1.4.3" + dependencies: + uzip-module: ^1.0.2 + checksum: ce29348edab7b5fb5b7b4d43437f48e35812ac8b3cc2d76efd1acfcad6dd1b96b4f96bfd03250a724b87ba99dd531d7727ad24b590acf727dde79f54f5e779ed + languageName: node + linkType: hard + "update-browserslist-db@npm:^1.0.13": version: 1.0.13 resolution: "update-browserslist-db@npm:1.0.13" @@ -19232,6 +21315,16 @@ __metadata: languageName: node linkType: hard +"url@npm:^0.11.3": + version: 0.11.3 + resolution: "url@npm:0.11.3" + dependencies: + punycode: ^1.4.1 + qs: ^6.11.2 + checksum: f9e7886f46a16f96d2e42fbcc5d682c231c55ef5442c1ff66150c0f6556f6e3a97d094a84f51be15ec2432711d212eb60426659ce418f5fcadeaa3f601532c4e + languageName: node + linkType: hard + "util-deprecate@npm:^1.0.1, util-deprecate@npm:^1.0.2, util-deprecate@npm:~1.0.1": version: 1.0.2 resolution: "util-deprecate@npm:1.0.2" @@ -19239,6 +21332,19 @@ __metadata: languageName: node linkType: hard +"util@npm:^0.12.5": + version: 0.12.5 + resolution: "util@npm:0.12.5" + dependencies: + inherits: ^2.0.3 + is-arguments: ^1.0.4 + is-generator-function: ^1.0.7 + is-typed-array: ^1.1.3 + which-typed-array: ^1.1.2 + checksum: 705e51f0de5b446f4edec10739752ac25856541e0254ea1e7e45e5b9f9b0cb105bc4bd415736a6210edc68245a7f903bf085ffb08dd7deb8a0e847f60538a38a + languageName: node + linkType: hard + "utila@npm:~0.4": version: 0.4.0 resolution: "utila@npm:0.4.0" @@ -19269,17 +21375,10 @@ __metadata: languageName: node linkType: hard -"uvu@npm:0.5.6": - version: 0.5.6 - resolution: "uvu@npm:0.5.6" - dependencies: - dequal: ^2.0.0 - diff: ^5.0.0 - kleur: ^4.0.3 - sade: ^1.7.3 - bin: - uvu: bin.js - checksum: 09460a37975627de9fcad396e5078fb844d01aaf64a6399ebfcfd9e55f1c2037539b47611e8631f89be07656962af0cf48c334993db82b9ae9c3d25ce3862168 
+"uzip-module@npm:^1.0.2": + version: 1.0.3 + resolution: "uzip-module@npm:1.0.3" + checksum: fc286c44a04d75055577fae8293d3fee499d1e850f87e88c158b1e3657f4794a3a40ca2d34f73474ff82917176dd5ca9d1c0d1e375a083714e11afabd3afa423 languageName: node linkType: hard @@ -19375,6 +21474,13 @@ __metadata: languageName: node linkType: hard +"vscode-languageserver-textdocument@npm:^1.0.11": + version: 1.0.11 + resolution: "vscode-languageserver-textdocument@npm:1.0.11" + checksum: ea7cdc9d4ffaae5952071fa11d17d714215a76444e6936c9359f94b9ba3222a52a55edb5bd5928bd3e9712b900a9f175bb3565ec1c8923234fe3bd327584bafb + languageName: node + linkType: hard + "vscode-oniguruma@npm:^1.7.0": version: 1.7.0 resolution: "vscode-oniguruma@npm:1.7.0" @@ -19389,6 +21495,13 @@ __metadata: languageName: node linkType: hard +"vscode-uri@npm:^3.0.8": + version: 3.0.8 + resolution: "vscode-uri@npm:3.0.8" + checksum: 514249126850c0a41a7d8c3c2836cab35983b9dc1938b903cfa253b9e33974c1416d62a00111385adcfa2b98df456437ab704f709a2ecca76a90134ef5eb4832 + languageName: node + linkType: hard + "wait-on@npm:^6.0.1": version: 6.0.1 resolution: "wait-on@npm:6.0.1" @@ -19404,7 +21517,7 @@ __metadata: languageName: node linkType: hard -"watchpack@npm:^2.4.0": +"watchpack@npm:^2.1.1, watchpack@npm:^2.4.0": version: 2.4.0 resolution: "watchpack@npm:2.4.0" dependencies: @@ -19437,13 +21550,6 @@ __metadata: languageName: node linkType: hard -"web-streams-polyfill@npm:^3.0.3": - version: 3.2.1 - resolution: "web-streams-polyfill@npm:3.2.1" - checksum: b119c78574b6d65935e35098c2afdcd752b84268e18746606af149e3c424e15621b6f1ff0b42b2676dc012fc4f0d313f964b41a4b5031e525faa03997457da02 - languageName: node - linkType: hard - "webidl-conversions@npm:^3.0.0": version: 3.0.1 resolution: "webidl-conversions@npm:3.0.1" @@ -19481,6 +21587,39 @@ __metadata: languageName: node linkType: hard +"webpack-cli@npm:^4.7.2": + version: 4.10.0 + resolution: "webpack-cli@npm:4.10.0" + dependencies: + "@discoveryjs/json-ext": ^0.5.0 + 
"@webpack-cli/configtest": ^1.2.0 + "@webpack-cli/info": ^1.5.0 + "@webpack-cli/serve": ^1.7.0 + colorette: ^2.0.14 + commander: ^7.0.0 + cross-spawn: ^7.0.3 + fastest-levenshtein: ^1.0.12 + import-local: ^3.0.2 + interpret: ^2.2.0 + rechoir: ^0.7.0 + webpack-merge: ^5.7.3 + peerDependencies: + webpack: 4.x.x || 5.x.x + peerDependenciesMeta: + "@webpack-cli/generators": + optional: true + "@webpack-cli/migrate": + optional: true + webpack-bundle-analyzer: + optional: true + webpack-dev-server: + optional: true + bin: + webpack-cli: bin/cli.js + checksum: 2ff5355ac348e6b40f2630a203b981728834dca96d6d621be96249764b2d0fc01dd54edfcc37f02214d02935de2cf0eefd6ce689d970d154ef493f01ba922390 + languageName: node + linkType: hard + "webpack-dev-middleware@npm:^5.3.1": version: 5.3.3 resolution: "webpack-dev-middleware@npm:5.3.3" @@ -19543,7 +21682,7 @@ __metadata: languageName: node linkType: hard -"webpack-merge@npm:^5.8.0, webpack-merge@npm:^5.9.0": +"webpack-merge@npm:^5.7.3, webpack-merge@npm:^5.8.0, webpack-merge@npm:^5.9.0": version: 5.10.0 resolution: "webpack-merge@npm:5.10.0" dependencies: @@ -19561,7 +21700,7 @@ __metadata: languageName: node linkType: hard -"webpack@npm:^5.73.0, webpack@npm:^5.88.1": +"webpack@npm:^5.49.0, webpack@npm:^5.73.0, webpack@npm:^5.88.1": version: 5.89.0 resolution: "webpack@npm:5.89.0" dependencies: @@ -19650,6 +21789,19 @@ __metadata: languageName: node linkType: hard +"which-typed-array@npm:^1.1.11, which-typed-array@npm:^1.1.2": + version: 1.1.13 + resolution: "which-typed-array@npm:1.1.13" + dependencies: + available-typed-arrays: ^1.0.5 + call-bind: ^1.0.4 + for-each: ^0.3.3 + gopd: ^1.0.1 + has-tostringtag: ^1.0.0 + checksum: 3828a0d5d72c800e369d447e54c7620742a4cc0c9baf1b5e8c17e9b6ff90d8d861a3a6dd4800f1953dbf80e5e5cec954a289e5b4a223e3bee4aeb1f8c5f33309 + languageName: node + linkType: hard + "which@npm:^1.3.1": version: 1.3.1 resolution: "which@npm:1.3.1" @@ -19884,6 +22036,13 @@ __metadata: languageName: node linkType: hard 
+"xtend@npm:^2.2.0": + version: 2.2.0 + resolution: "xtend@npm:2.2.0" + checksum: 9fcd1ddabefdb3c68a698b08177525ad14a6df3423b13bad9a53900d19374e476a43c219b0756d39675776b2326a35fe477c547cfb8a05ae9fea4ba2235bebe2 + languageName: node + linkType: hard + "xtend@npm:^4.0.0, xtend@npm:^4.0.1": version: 4.0.2 resolution: "xtend@npm:4.0.2" @@ -19891,6 +22050,32 @@ __metadata: languageName: node linkType: hard +"xtend@npm:~2.0.4": + version: 2.0.6 + resolution: "xtend@npm:2.0.6" + dependencies: + is-object: ~0.1.2 + object-keys: ~0.2.0 + checksum: 414531e51cbc56d4676ae2b3a4070052e0c7a36caf7ee74f2e8449fe0fc1752b971a776fca5b85ec02ef3d0a33b8e75491d900474b8407f3f4bba3f49325a785 + languageName: node + linkType: hard + +"xtend@npm:~2.1.2": + version: 2.1.2 + resolution: "xtend@npm:2.1.2" + dependencies: + object-keys: ~0.4.0 + checksum: a8b79f31502c163205984eaa2b196051cd2fab0882b49758e30f2f9018255bc6c462e32a090bf3385d1bda04755ad8cc0052a09e049b0038f49eb9b950d9c447 + languageName: node + linkType: hard + +"xtend@npm:~3.0.0": + version: 3.0.0 + resolution: "xtend@npm:3.0.0" + checksum: ecdc4dd74f26e561dbc13d4148fcc7b8f46f49b9259862fc31e42b7cede9eee62af9d869050a7b8e089475e858744a74ceae3f0da2943755ef712f3277ad2e50 + languageName: node + linkType: hard + "y18n@npm:^5.0.5": version: 5.0.8 resolution: "y18n@npm:5.0.8" @@ -19919,7 +22104,7 @@ __metadata: languageName: node linkType: hard -"yaml@npm:^2.1.1, yaml@npm:^2.2.2": +"yaml@npm:^2.2.2, yaml@npm:^2.3.4": version: 2.3.4 resolution: "yaml@npm:2.3.4" checksum: e6d1dae1c6383bcc8ba11796eef3b8c02d5082911c6723efeeb5ba50fc8e881df18d645e64de68e421b577296000bea9c75d6d9097c2f6699da3ae0406c030d8 @@ -20055,26 +22240,3 @@ __metadata: checksum: f22ec5fc2d5f02c423c93d35cdfa83573a3a3bd98c66b927c368ea4d0e7252a500df2a90a6b45522be536a96a73404393c958e945fdba95e6832c200791702b6 languageName: node linkType: hard - -"zx@npm:7.1.1": - version: 7.1.1 - resolution: "zx@npm:7.1.1" - dependencies: - "@types/fs-extra": ^9.0.13 - "@types/minimist": ^1.2.2 - 
"@types/node": ^18.7.20 - "@types/ps-tree": ^1.1.2 - "@types/which": ^2.0.1 - chalk: ^5.0.1 - fs-extra: ^10.1.0 - globby: ^13.1.2 - minimist: ^1.2.6 - node-fetch: 3.2.10 - ps-tree: ^1.2.0 - which: ^2.0.2 - yaml: ^2.1.1 - bin: - zx: build/cli.js - checksum: 510c56366a62220108247624a83fc088bfc8f393e3f20168d69a67e3c7aff1a4b790c118ee70e58171dcbc91b53ab079bba3074e1c40829f6179a7f48a38be78 - languageName: node - linkType: hard